From 1a8f11a3fea6865e95dd47df5ed79125190475d2 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 8 Sep 2024 20:39:39 -0700 Subject: [PATCH 01/34] Improve schema registry --- Makefile | 6 + client.go | 15 - client_test.go | 53 ++- config.go | 14 +- example/compose.yaml | 13 + example/producer_avro/dummy_event.avsc | 13 + example/producer_avro/dummy_event_gen.go | 31 ++ example/producer_avro/go.mod | 26 ++ example/producer_avro/go.sum | 489 +++++++++++++++++++++++ example/producer_avro/main.go | 49 +++ example/worker_avro/bla.go | 12 + example/worker_avro/dummy_event.avsc | 11 + example/worker_avro/go.mod | 22 + example/worker_avro/go.sum | 411 +++++++++++++++++++ example/worker_avro/main.go | 97 +++++ formatter.go | 129 +++++- formatter_test.go | 2 +- go.mod | 5 + go.sum | 80 ++++ message.go | 40 +- reader.go | 23 +- writer.go | 52 ++- 22 files changed, 1523 insertions(+), 70 deletions(-) create mode 100644 example/producer_avro/dummy_event.avsc create mode 100644 example/producer_avro/dummy_event_gen.go create mode 100644 example/producer_avro/go.mod create mode 100644 example/producer_avro/go.sum create mode 100644 example/producer_avro/main.go create mode 100644 example/worker_avro/bla.go create mode 100644 example/worker_avro/dummy_event.avsc create mode 100644 example/worker_avro/go.mod create mode 100644 example/worker_avro/go.sum create mode 100644 example/worker_avro/main.go diff --git a/Makefile b/Makefile index 1feb319..aa300e0 100644 --- a/Makefile +++ b/Makefile @@ -41,3 +41,9 @@ golangci-lint: (cd $(mod) && \ echo "[lint] golangci-lint: $(mod)" && \ golangci-lint run --path-prefix $(mod) ./...) &&) true + +.PHONY: gen +gen: + go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p main -d ./example/producer_avro ./example/producer_avro/dummy_event.avsc + #go run github.com/hamba/avro/v2/cmd/avrogen@v2.25.1 -pkg main -o ./example/producer_avro/bla.go -tags json:snake,yaml:upper-camel ./example/producer_avro/dummy_event.avsc + go run github.com/hamba/avro/v2/cmd/avrogen@v2.25.1 -pkg main -o ./example/worker_avro/bla.go -tags json:snake,yaml:upper-camel ./example/worker_avro/dummy_event.avsc diff --git a/client.go b/client.go index c6f0b93..d9c5c12 100644 --- a/client.go +++ b/client.go @@ -4,10 +4,8 @@ package zkafka import ( "context" - "fmt" "sync" - "github.com/zillow/zfmt" "go.opentelemetry.io/otel/propagation" "go.opentelemetry.io/otel/trace" ) @@ -132,19 +130,6 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig, opts return c.writers[topicConfig.ClientID], nil } -func getFormatter(topicConfig TopicConfig) (zfmt.Formatter, error) { - switch topicConfig.GetFormatter() { - case CustomFmt: - return &noopFormatter{}, nil - default: - f, err := zfmt.GetFormatter(topicConfig.GetFormatter(), topicConfig.GetSchemaID()) - if err != nil { - return nil, fmt.Errorf("unsupported formatter %s", topicConfig.GetFormatter()) - } - return f, nil - } -} - // Close terminates all cached readers and writers gracefully. 
func (c *Client) Close() error { c.mu.Lock() diff --git a/client_test.go b/client_test.go index d384bc4..4f3e75e 100644 --- a/client_test.go +++ b/client_test.go @@ -516,9 +516,9 @@ func TestClient_Close(t *testing.T) { } } -func Test_getFormatter(t *testing.T) { +func Test_getFormatter_Consumer(t *testing.T) { type args struct { - topicConfig TopicConfig + topicConfig ConsumerTopicConfig } tests := []struct { name string @@ -555,12 +555,6 @@ func Test_getFormatter(t *testing.T) { want: &zfmt.AvroFormatter{}, wantErr: false, }, - { - name: "confluent avro with schema ClientID", - args: args{topicConfig: ProducerTopicConfig{Formatter: zfmt.FormatterType("avro_schema")}}, - want: &zfmt.SchematizedAvroFormatter{}, - wantErr: false, - }, { name: "confluent avro with inferred schema ClientID", args: args{topicConfig: ConsumerTopicConfig{Formatter: zfmt.FormatterType("avro_schema"), SchemaID: 10}}, @@ -585,22 +579,53 @@ func Test_getFormatter(t *testing.T) { want: &zfmt.SchematizedProtoFormatterDeprecated{}, wantErr: false, }, + { + name: "unsupported", + args: args{topicConfig: ConsumerTopicConfig{Formatter: zfmt.FormatterType("what"), SchemaID: 10}}, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + defer recoverThenFail(t) + got, _, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID, SchemaRegistryConfig{}) + if tt.wantErr { + require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, tt.want, got) + } + }) + } +} + +func Test_getFormatter_Producer(t *testing.T) { + type args struct { + topicConfig ProducerTopicConfig + } + tests := []struct { + name string + args args + want zfmt.Formatter + wantErr bool + }{ + { + name: "confluent avro with schema ClientID", + args: args{topicConfig: ProducerTopicConfig{Formatter: zfmt.FormatterType("avro_schema")}}, + want: &zfmt.SchematizedAvroFormatter{}, + wantErr: false, + }, { name: "confluent json with inferred schema ID", args: args{topicConfig: ProducerTopicConfig{Formatter: zfmt.FormatterType("proto_schema_deprecated"), SchemaID: 10}}, want: &zfmt.SchematizedProtoFormatterDeprecated{SchemaID: 10}, wantErr: false, }, - { - name: "unsupported", - args: args{topicConfig: ConsumerTopicConfig{Formatter: zfmt.FormatterType("what"), SchemaID: 10}}, - wantErr: true, - }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got, err := getFormatter(tt.args.topicConfig) + got, _, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID, SchemaRegistryConfig{}) if tt.wantErr { require.Error(t, err) } else { diff --git a/config.go b/config.go index c84ffa8..49e471e 100644 --- a/config.go +++ b/config.go @@ -78,6 +78,8 @@ type ConsumerTopicConfig struct { // Formatter is json if not defined Formatter zfmt.FormatterType + SchemaRegistry SchemaRegistryConfig + // SchemaID defines the schema registered with Confluent Schema Registry // Default value is 0, and it implies that both Writer and Reader do not care about schema validation // and should encode/decode the message based on data type provided. @@ -172,6 +174,8 @@ type ProducerTopicConfig struct { // Formatter is json if not defined Formatter zfmt.FormatterType + SchemaRegistry SchemaRegistryConfig + // SchemaID defines the schema registered with Confluent Schema Registry // Default value is 0, and it implies that both Writer and Reader do not care about schema validation // and should encode/decode the message based on data type provided. 
@@ -207,9 +211,13 @@ func (p ProducerTopicConfig) GetSchemaID() int { return p.SchemaID } -type TopicConfig interface { - GetFormatter() zfmt.FormatterType - GetSchemaID() int +type SchemaRegistryConfig struct { + URL string + Serialization struct { + AutoRegisterSchemas bool + } + Deserialization struct { + } } func getDefaultConsumerTopicConfig(topicConfig *ConsumerTopicConfig) error { diff --git a/example/compose.yaml b/example/compose.yaml index bd28a32..52d7b61 100644 --- a/example/compose.yaml +++ b/example/compose.yaml @@ -22,3 +22,16 @@ services: KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + schemaregistry: + image: confluentinc/cp-schema-registry:5.3.2 + depends_on: + - kafka + - zookeeper + ports: + - "8081:8081" + environment: + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper:2181 + SCHEMA_REGISTRY_HOST_NAME: schemaregistry + SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081 + SCHEMA_REGISTRY_DEBUG: true + diff --git a/example/producer_avro/dummy_event.avsc b/example/producer_avro/dummy_event.avsc new file mode 100644 index 0000000..1f4112d --- /dev/null +++ b/example/producer_avro/dummy_event.avsc @@ -0,0 +1,13 @@ +{ + "type": "record", + "name": "DummyEvent", + "fields": [ + {"name": "IntField", "type": "int"}, + {"name": "DoubleField", "type": "double"}, + {"name": "StringField", "type": "string"}, + {"name": "BoolField", "type": "boolean"}, + {"name": "BytesField", "type": "bytes"}, + {"name": "NewFieldWithDefault", "type": ["null", "string"], "default": null }, + {"name": "NewFieldWithDefault2", "type": ["null", "string"], "default": null } + ] +} \ No newline at end of file diff --git a/example/producer_avro/dummy_event_gen.go b/example/producer_avro/dummy_event_gen.go new file mode 100644 index 0000000..083961e --- /dev/null +++ b/example/producer_avro/dummy_event_gen.go @@ -0,0 +1,31 @@ +// Code generated by avrogen. DO NOT EDIT. + +package main + +import ( + "github.com/heetch/avro/avrotypegen" +) + +type DummyEvent struct { + IntField int + DoubleField float64 + StringField string + BoolField bool + BytesField []byte + NewFieldWithDefault *string + NewFieldWithDefault2 *string +} + +// AvroRecord implements the avro.AvroRecord interface. +func (DummyEvent) AvroRecord() avrotypegen.RecordInfo { + return avrotypegen.RecordInfo{ + Schema: `{"fields":[{"name":"IntField","type":"int"},{"name":"DoubleField","type":"double"},{"name":"StringField","type":"string"},{"name":"BoolField","type":"boolean"},{"name":"BytesField","type":"bytes"},{"default":null,"name":"NewFieldWithDefault","type":["null","string"]},{"default":null,"name":"NewFieldWithDefault2","type":["null","string"]}],"name":"DummyEvent","type":"record"}`, + Required: []bool{ + 0: true, + 1: true, + 2: true, + 3: true, + 4: true, + }, + } +} diff --git a/example/producer_avro/go.mod b/example/producer_avro/go.mod new file mode 100644 index 0000000..818a939 --- /dev/null +++ b/example/producer_avro/go.mod @@ -0,0 +1,26 @@ +module github.com/zillow/zkafka/example/producer_avro + +go 1.23.1 + +replace github.com/zillow/zkafka v1.0.0 => ../.. 
+ + +require ( + github.com/zillow/zkafka v1.0.0 + github.com/actgardner/gogen-avro/v10 v10.2.1 // indirect + github.com/confluentinc/confluent-kafka-go/v2 v2.5.0 // indirect + github.com/golang/protobuf v1.5.4 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/hamba/avro/v2 v2.20.1 // indirect + github.com/heetch/avro v0.4.5 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/sony/gobreaker v1.0.0 // indirect + github.com/zillow/zfmt v1.0.1 // indirect + go.opentelemetry.io/otel v1.28.0 // indirect + go.opentelemetry.io/otel/trace v1.28.0 // indirect + golang.org/x/sync v0.7.0 // indirect + google.golang.org/protobuf v1.34.2 // indirect +) diff --git a/example/producer_avro/go.sum b/example/producer_avro/go.sum new file mode 100644 index 0000000..e8d3c72 --- /dev/null +++ b/example/producer_avro/go.sum @@ -0,0 +1,489 @@ +cloud.google.com/go/compute v1.25.1 h1:ZRpHJedLtTpKgr3RV1Fx23NuaAEN1Zfx9hw1u4aJdjU= +cloud.google.com/go/compute v1.25.1/go.mod h1:oopOIR53ly6viBYxaDhBfJwzUAxf1zE//uf3IB011ls= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= +cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= +github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= +github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ= +github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 h1:n1DH8TPV4qqPTje2RcUBYwtrTWlabVp4n46+74X2pn4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0/go.mod h1:HDcZnuGbiyppErN6lB+idp4CKhjbc8gwjto6OPpyggM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0 h1:DRiANoJTiW6obBQe3SqZizkuV1PEgfiiGivmVocDy64= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0/go.mod h1:qLIye2hwb/ZouqhpSD9Zn3SJipvpEnz1Ywl3VUk9Y0s= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA= 
+github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= +github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/Microsoft/hcsshim v0.11.4 h1:68vKo2VN8DE9AdN4tnkWnmdhqdbpUFM8OF3Airm7fz8= +github.com/Microsoft/hcsshim v0.11.4/go.mod h1:smjE4dvqPX9Zldna+t5FG3rnoHhaB7QYxPRqGcpAD9w= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= +github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= +github.com/actgardner/gogen-avro/v10 v10.2.1 h1:z3pOGblRjAJCYpkIJ8CmbMJdksi4rAhaygw0dyXZ930= +github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ= +github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= +github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= +github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA= +github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= +github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro= +github.com/aws/aws-sdk-go-v2/config v1.27.10/go.mod h1:BePM7Vo4OBpHreKRUMuDXX+/+JWP38FLkzl5m27/Jjs= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10 h1:qDZ3EA2lv1KangvQB6y258OssCHD0xvaGiEDkG4X/10= +github.com/aws/aws-sdk-go-v2/credentials v1.17.10/go.mod h1:6t3sucOaYDwDssHQa0ojH1RpmVmF5/jArkye1b2FKMI= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 h1:FVJ0r5XTHSmIHJV6KuDmdYhEpvlHpiSd38RQWhut5J4= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1/go.mod h1:zusuAeqezXzAB24LGuzuekqMAEgWkVYukBec3kr3jUg= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1 h1:SBn4I0fJXF9FYOVRSVMWuhvEKoAHDikjGpS3wlmw5DE= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1/go.mod h1:2snWQJQUKsbN66vAawJuOGX7dr37pfOq9hb0tZDGIqQ= +github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8= +github.com/aws/aws-sdk-go-v2/service/sso 
v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4/go.mod h1:mUYPBhaF2lGiukDEjJX2BLRRKTmoUSitGDUgM4tRxak= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n2HZPkcKgPAi1phU= +github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw= +github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= +github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY= +github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE= +github.com/cenkalti/backoff/v3 v3.0.0 h1:ske+9nBpD9qZsTBoF41nW5L+AIuFBKMeze18XQ3eG1c= +github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/compose-spec/compose-go/v2 v2.1.0 h1:qdW2qISQlCQG8v1O2TChcdxgAWTUGgUX/CPSO+ES9+E= +github.com/compose-spec/compose-go/v2 v2.1.0/go.mod h1:bEPizBkIojlQ20pi2vNluBa58tevvj0Y18oUSHPyfdc= +github.com/confluentinc/confluent-kafka-go/v2 v2.5.0 h1:PM18lA9g6u6Qcz06DpXmGRlxXTvWlHqnlAkQi1chPUo= +github.com/confluentinc/confluent-kafka-go/v2 v2.5.0/go.mod h1:Hyo+IIQ/tmsfkOcRP8T6VlSeOW3T33v0Me8Xvq4u90Y= +github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro= +github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk= +github.com/containerd/containerd v1.7.15 h1:afEHXdil9iAm03BmhjzKyXnnEBtjaLJefdU7DV0IFes= +github.com/containerd/containerd v1.7.15/go.mod h1:ISzRRTMF8EXNpJlTzyr2XMhN+j9K302C21/+cr3kUnY= +github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8= +github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/ttrpc v1.2.3 h1:4jlhbXIGvijRtNC8F/5CpuJZ7yKOBFGFOOXg1bkISz0= +github.com/containerd/ttrpc v1.2.3/go.mod h1:ieWsXucbb8Mj9PH0rXCw1i8IunRbbAiDkpXkbfflWBM= +github.com/containerd/typeurl/v2 v2.1.1 h1:3Q4Pt7i8nYwy2KmQWIw2+1hTvwTE/6w9FqcttATPO/4= +github.com/containerd/typeurl/v2 v2.1.1/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0= +github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E= +github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk= 
+github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= +github.com/docker/buildx v0.14.0 h1:FxqcfE7xgeEC4oQlKLpuvfobRDVDXrHE3jByM+mdyqk= +github.com/docker/buildx v0.14.0/go.mod h1:Vy/2lC9QsJvo33+7KKkN/GDE5WxnVqW0/dpcN7ZqPJY= +github.com/docker/cli v26.1.0+incompatible h1:+nwRy8Ocd8cYNQ60mozDDICICD8aoFGtlPXifX/UQ3Y= +github.com/docker/cli v26.1.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/compose/v2 v2.27.0 h1:FKyClQdErCxUZULC2zo6Jn5ve+epFPe/Y0HaxjmUzNg= +github.com/docker/compose/v2 v2.27.0/go.mod h1:uaqwmY6haO8wXWHk+LAsqqDapX6boH4izRKqj/E7+Bo= +github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= +github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v26.1.0+incompatible h1:W1G9MPNbskA6VZWL7b3ZljTh0pXI68FpINx0GKaOdaM= +github.com/docker/docker v26.1.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers v0.8.0 h1:YQFtbBQb4VrpoPxhFuzEBPQ9E16qz5SpHLS+uswaCp8= +github.com/docker/docker-credential-helpers v0.8.0/go.mod h1:UGFXcuoQ5TxPiB54nHOZ32AWRqQdECoh/Mg0AlEYb40= +github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0= +github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= +github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8= +github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw= +github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= +github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 h1:XBBHcIb256gUJtLmY22n99HaZTz+r2Z51xUPi01m3wg= +github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg= +github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g= +github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/frankban/quicktest v1.14.0 h1:+cqqvzZV87b4adx/5ayVOaYZ2CrvM4ejQvUdBzPPUss= +github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og= +github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvnkw= +github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc= +github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo= +github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= +github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k= +github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 
h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= +github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= +github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE= +github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= +github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= +github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= +github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= +github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw= +github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/cel-go v0.20.1 h1:nDx9r8S3L4pE61eDdt8igGj8rf5kjYR3ILxWIpWNi84= +github.com/google/cel-go v0.20.1/go.mod h1:kWcIzTsPX0zmQ+H3TirHstLLf9ep5QTsZBN9u4dOYLg= +github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= +github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= +github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex 
v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= +github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/gax-go/v2 v2.12.2 h1:mhN09QQW1jEWeMF74zGR81R30z4VJzjZsfkUhuHF+DA= +github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= +github.com/hamba/avro/v2 v2.20.1 h1:3WByQiVn7wT7d27WQq6pvBRC00FVOrniP6u67FLA/2E= +github.com/hamba/avro/v2 v2.20.1/go.mod h1:xHiKXbISpb3Ovc809XdzWow+XGTn+Oyf/F9aZbTLAig= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.7.5 h1:bJj+Pj19UZMIweq/iie+1u5YCdGrnxCT9yvm0e+Nd5M= +github.com/hashicorp/go-retryablehttp v0.7.5/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8= +github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6 h1:om4Al8Oy7kCm/B86rLCLah4Dt5Aa0Fr5rYBG60OzwHQ= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9CdjCtrXrXGuOpxEA7Ts= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4= +github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc= +github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= +github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= +github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/vault/api v1.12.1 
h1:WzGN4X5jrJdNO39g6Sa55djNio3I9DxEBOTmCZE7tm0= +github.com/hashicorp/vault/api v1.12.1/go.mod h1:1pqP/sErScodde+ybJCyP+ONC4jzEg7Dmawg/QLWo1k= +github.com/heetch/avro v0.4.5 h1:BSnj4wEeUG1IjMTm9/tBwQnV3euuIVa1mRWHnm1t8VU= +github.com/heetch/avro v0.4.5/go.mod h1:gxf9GnbjTXmWmqxhdNbAMcZCjpye7RV5r9t3Q0dL6ws= +github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= +github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= +github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF/XEFBbY= +github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= +github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= +github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= 
+github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= +github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= +github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4= +github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= +github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= +github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= +github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= +github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/buildkit v0.13.1 h1:L8afOFhPq2RPJJSr/VyzbufwID7jquZVB7oFHbPRcPE= +github.com/moby/buildkit v0.13.1/go.mod h1:aNmNQKLBFYAOFuzQjR3VA27/FijlvtBD1pjNwTSN37k= +github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= +github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg= +github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= +github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= +github.com/moby/sys/mountinfo v0.7.1 h1:/tTvQaSJRr2FshkhXiIpux6fQ2Zvc4j7tAhMTStAG2g= +github.com/moby/sys/mountinfo v0.7.1/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI= +github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI= +github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg= +github.com/moby/sys/symlink v0.2.0 h1:tk1rOM+Ljp0nFmfOIBtlV3rTDlWOwFRhjEeAhZB0nZc= +github.com/moby/sys/symlink v0.2.0/go.mod h1:7uZVF2dqJjG/NsClqul95CqKOBRQyYSNnJ6BMgR/gFs= +github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= +github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= +github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= +github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd 
h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= +github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= +github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= +github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= +github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY= +github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= +github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= +github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc h1:zAsgcP8MhzAbhMnB1QQ2O7ZhWYVGYSR2iVcjzQuPV+o= +github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc/go.mod h1:S8xSOnV3CgpNrWd0GQ/OoQfMtlg2uPRSuTzcSGrzwK8= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rogpeppe/go-internal v1.10.0 
h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= +github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= +github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE= +github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs= +github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU= +github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc= +github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI= +github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE= +github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4= +github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= +github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= +github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= +github.com/sony/gobreaker v1.0.0 h1:feX5fGGXSl3dYd4aHZItw+FpHLvvoaqkawKjVNiFMNQ= +github.com/sony/gobreaker v1.0.0/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= +github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/testcontainers/testcontainers-go v0.31.0 h1:W0VwIhcEVhRflwL9as3dhY6jXjVCA27AkmbnZ+UTh3U= +github.com/testcontainers/testcontainers-go v0.31.0/go.mod h1:D2lAoA0zUFiSY+eAflqK5mcUx/A5hrrORaEQrd0SefI= +github.com/testcontainers/testcontainers-go/modules/compose v0.31.0 h1:H74o3HisnApIDQx7sWibGzOl/Oo0By8DjyVeUf3qd6I= +github.com/testcontainers/testcontainers-go/modules/compose v0.31.0/go.mod h1:z1JAsvL2/pNFy40yJX0VX9Yk+hzOCIO5DydxBJHBbCY= +github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c= +github.com/theupdateframework/notary v0.7.0/go.mod 
h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= +github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA= +github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0 h1:A/2tIdYXqUuVZeWy0Yq/PWKsXgebzMyh5mLbpNEMVUo= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0/go.mod h1:QXPc/i5yUEWWZ4lbe2WOam1kDdrXjGHRjl0Lzo7IQDU= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0 h1:REG5YX2omhgPmiIT7GLqmzWFnIksZsog1FHJ+Pi1xJE= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0/go.mod h1:OJLS+EYJo/BTViJj7EBG5deKLeQfYwVNW8HMS1qHAAo= +github.com/tink-crypto/tink-go/v2 v2.1.0 h1:QXFBguwMwTIaU17EgZpEJWsUSc60b1BAGTzBIoMdmok= +github.com/tink-crypto/tink-go/v2 v2.1.0/go.mod h1:y1TnYFt1i2eZVfx4OGc+C+EMp4CoKWAw2VSEuoicHHI= +github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= +github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= +github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= +github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/tonistiigi/fsutil v0.0.0-20240301111122-7525a1af2bb5 h1:oZS8KCqAg62sxJkEq/Ppzqrb6EooqzWtL8Oaex7bc5c= +github.com/tonistiigi/fsutil v0.0.0-20240301111122-7525a1af2bb5/go.mod h1:vbbYqJlnswsbJqWUcJN8fKtBhnEgldDrcagTgnBVKKM= +github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0= +github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk= +github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531 h1:Y/M5lygoNPKwVNLMPXgVfsRT40CSFKXCxuU8LoHySjs= +github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiatechs/jsonata-go v1.8.5 h1:m1NaokPKD6LPaTPRl674EQz5mpkJvM3ymjdReDEP6/A= +github.com/xiatechs/jsonata-go v1.8.5/go.mod h1:yGEvviiftcdVfhSRhRSpgyTel89T58f+690iB0fp2Vk= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +github.com/zillow/zfmt v1.0.1 h1:JLN5WaxoqqoEPUpVWer83uhXhDPAA2nZkfQqgKnWp+w= +github.com/zillow/zfmt v1.0.1/go.mod h1:0PpKh4rWh+5Ghr2bbuN5UvEcqEz6PkHfE0Idgjyxy7Y= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= 
+go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1 h1:gbhw/u49SS3gkPWiYweQNJGm/uJN5GkI/FrosxSHT7A= +go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1/go.mod h1:GnOaBaFQ2we3b9AGWJpsBa7v1S5RlQzlC3O7dRMxZhM= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw= +go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo= +go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0/go.mod h1:hG4Fj/y8TR/tlEDREo8tWstl9fO9gcFkn4xrx0Io8xU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 h1:NmnYCiR0qNufkldjVvyQfZTHSdzeHoZ41zggMsdMcLM= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0/go.mod h1:UVAO61+umUsHLtYb8KXXRoHtxUkdOPkYidzW3gipRLQ= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0 h1:wNMDy/LVGLj2h3p6zg4d0gypKfWKSWI14E1C4smOgl8= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0/go.mod h1:YfbDdXAAkemWJK3H/DshvlrxqFB2rtW4rY6ky/3x/H0= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0 h1:cl5P5/GIfFh4t6xyruOgJP5QiA1pw4fYYdv6nc6CBWw= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0/go.mod h1:zgBdWWAu7oEEMC06MMKc5NLbA/1YDXV1sMpSqEeLQLg= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0 h1:tIqheXEFWAZ7O8A7m+J0aPTmpJN3YQ7qetUAdkkkKpk= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0/go.mod h1:nUeKExfxAQVbiVFn32YXpXZZHZ61Cc3s3Rn1pDBGAb0= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 h1:digkEZCJWobwBqMwC0cwCq8/wkkRy/OowZg5OArWZrM= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0/go.mod h1:/OpE/y70qVkndM0TrxT4KBoN3RsFZP0QaofcfYrj76I= +go.opentelemetry.io/otel/exporters/prometheus v0.42.0 h1:jwV9iQdvp38fxXi8ZC+lNpxjK16MRcZlpDYvbuO1FiA= +go.opentelemetry.io/otel/exporters/prometheus v0.42.0/go.mod h1:f3bYiqNqhoPxkvI2LrXqQVC546K7BuRDL/kKuxkujhA= +go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q= +go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s= +go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= +go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= +go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0= +go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q= +go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g= +go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI= +go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= +go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= +go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= +go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= +golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= +golang.org/x/crypto v0.22.0/go.mod 
h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o= +golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ= +golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= +golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +google.golang.org/api v0.169.0 h1:QwWPy71FgMWqJN/l6jVlFHUa29a7dcUy02I8o799nPY= +google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg= +google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= +google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= +google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAUOx8YmR5T7rc0rdzK8DyxM8cQ9zq0= +google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa/go.mod h1:CnZenrTdRJb7jc+jOm0Rkywq+9wh0QC4U8tyiRbEPPM= +google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:RFiFrvy37/mpSpdySBDrUdipW/dHwsRwh3J3+A9VgT4= +google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 h1:NnYq6UN9ReLM9/Y01KWNOWyI5xQ9kbIms5GGJVwS/Yc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= +google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk= +google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y= +gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI= +gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +k8s.io/api v0.29.2 h1:hBC7B9+MU+ptchxEqTNW2DkUosJpp1P+Wn6YncZ474A= +k8s.io/api v0.29.2/go.mod 
h1:sdIaaKuU7P44aoyyLlikSLayT6Vb7bvJNCX105xZXY0= +k8s.io/apimachinery v0.29.2 h1:EWGpfJ856oj11C52NRCHuU7rFDwxev48z+6DSlGNsV8= +k8s.io/apimachinery v0.29.2/go.mod h1:6HVkd1FwxIagpYrHSwJlQqZI3G9LfYWRPAkUvLnXTKU= +k8s.io/apiserver v0.29.2 h1:+Z9S0dSNr+CjnVXQePG8TcBWHr3Q7BmAr7NraHvsMiQ= +k8s.io/apiserver v0.29.2/go.mod h1:B0LieKVoyU7ykQvPFm7XSdIHaCHSzCzQWPFa5bqbeMQ= +k8s.io/client-go v0.29.2 h1:FEg85el1TeZp+/vYJM7hkDlSTFZ+c5nnK44DJ4FyoRg= +k8s.io/client-go v0.29.2/go.mod h1:knlvFZE58VpqbQpJNbCbctTVXcd35mMyAAwBdpt4jrA= +k8s.io/klog/v2 v2.110.1 h1:U/Af64HJf7FcwMcXyKm2RPM22WZzyR7OSpYj5tg3cL0= +k8s.io/klog/v2 v2.110.1/go.mod h1:YGtd1984u+GgbuZ7e08/yBuAfKLSO0+uR1Fhi6ExXjo= +k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 h1:aVUu9fTY98ivBPKR9Y5w/AuzbMm96cd3YHRTU83I780= +k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00/go.mod h1:AsvuZPBlUDVuCdzJ87iajxtXuR9oktsTctW/R9wwouA= +k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI= +k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= +tags.cncf.io/container-device-interface v0.6.2 h1:dThE6dtp/93ZDGhqaED2Pu374SOeUkBfuvkLuiTdwzg= +tags.cncf.io/container-device-interface v0.6.2/go.mod h1:Shusyhjs1A5Na/kqPVLL0KqnHQHuunol9LFeUNkuGVE= diff --git a/example/producer_avro/main.go b/example/producer_avro/main.go new file mode 100644 index 0000000..4648f1d --- /dev/null +++ b/example/producer_avro/main.go @@ -0,0 +1,49 @@ +package main + +import ( + "context" + _ "embed" + "log" + "math/rand" + "time" + + "github.com/zillow/zkafka" +) + +//go:embed dummy_event.avsc +var dummyEventSchema string + +func main() { + ctx := context.Background() + writer, err := zkafka.NewClient(zkafka.Config{ + BootstrapServers: []string{"localhost:29092"}, + }).Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: "example", + Topic: "zkafka-example-topic", + Formatter: zkafka.AvroConfluentFmt, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: struct{ AutoRegisterSchemas bool }{ + AutoRegisterSchemas: true, + }, + }, + }) + randomNames := []string{"stewy", "lydia", "asif", "mike", "justin"} + if err != nil { + log.Panic(err) + } + for { + event := DummyEvent{ + IntField: rand.Intn(100), + StringField: randomNames[rand.Intn(len(randomNames))], + } + + resp, err := writer.Write(ctx, event, zkafka.WithAvroSchema(dummyEventSchema)) + //resp, err := writer.Write(ctx, &event) + if err != nil { + log.Panic(err) + } + log.Printf("resp: %+v\n", resp) + time.Sleep(time.Second) + } +} diff --git a/example/worker_avro/bla.go b/example/worker_avro/bla.go new file mode 100644 index 0000000..c5f212b --- /dev/null +++ b/example/worker_avro/bla.go @@ -0,0 +1,12 @@ +package main + +// Code generated by avro/gen. DO NOT EDIT. + +// DummyEvent is a generated struct. 
diff --git a/example/worker_avro/bla.go b/example/worker_avro/bla.go
new file mode 100644
index 0000000..c5f212b
--- /dev/null
+++ b/example/worker_avro/bla.go
@@ -0,0 +1,12 @@
+package main
+
+// Code generated by avro/gen. DO NOT EDIT.
+
+// DummyEvent is a generated struct.
+type DummyEvent struct {
+	IntField    int     `avro:"IntField" json:"int_field" yaml:"IntField"`
+	DoubleField float64 `avro:"DoubleField" json:"double_field" yaml:"DoubleField"`
+	StringField string  `avro:"StringField" json:"string_field" yaml:"StringField"`
+	BoolField   bool    `avro:"BoolField" json:"bool_field" yaml:"BoolField"`
+	BytesField  []byte  `avro:"BytesField" json:"bytes_field" yaml:"BytesField"`
+}
diff --git a/example/worker_avro/dummy_event.avsc b/example/worker_avro/dummy_event.avsc
new file mode 100644
index 0000000..03ea6e7
--- /dev/null
+++ b/example/worker_avro/dummy_event.avsc
@@ -0,0 +1,11 @@
+{
+  "type": "record",
+  "name": "DummyEvent",
+  "fields": [
+    {"name": "IntField", "type": "int"},
+    {"name": "DoubleField", "type": "double"},
+    {"name": "StringField", "type": "string"},
+    {"name": "BoolField", "type": "boolean"},
+    {"name": "BytesField", "type": "bytes"}
+  ]
+}
\ No newline at end of file
diff --git a/example/worker_avro/go.mod b/example/worker_avro/go.mod
new file mode 100644
index 0000000..2bb9cf1
--- /dev/null
+++ b/example/worker_avro/go.mod
@@ -0,0 +1,22 @@
+module github.com/zillow/zkafka/example/worker_avro
+
+go 1.23.1
+
+replace github.com/zillow/zkafka v1.0.0 => ../..
+
+require (
+	github.com/google/uuid v1.6.0
+)
+
+require (
+	github.com/actgardner/gogen-avro/v10 v10.2.1 // indirect
+	github.com/confluentinc/confluent-kafka-go/v2 v2.5.0 // indirect
+	github.com/golang/protobuf v1.5.4 // indirect
+	github.com/heetch/avro v0.4.5 // indirect
+	github.com/sony/gobreaker v1.0.0 // indirect
+	github.com/zillow/zfmt v1.0.1 // indirect
+	go.opentelemetry.io/otel v1.28.0 // indirect
+	go.opentelemetry.io/otel/trace v1.28.0 // indirect
+	golang.org/x/sync v0.7.0 // indirect
+	google.golang.org/protobuf v1.34.2 // indirect
+)
diff --git a/example/worker_avro/go.sum b/example/worker_avro/go.sum
new file mode 100644
index 0000000..f1a03d8
--- /dev/null
+++ b/example/worker_avro/go.sum
@@ -0,0 +1,411 @@
+dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
+dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
+github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU=
+github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
+github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ=
+github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
+github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
+github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
+github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
+github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
+github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
+github.com/Microsoft/hcsshim v0.11.4 h1:68vKo2VN8DE9AdN4tnkWnmdhqdbpUFM8OF3Airm7fz8=
+github.com/Microsoft/hcsshim v0.11.4/go.mod h1:smjE4dvqPX9Zldna+t5FG3rnoHhaB7QYxPRqGcpAD9w=
+github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
+github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
+github.com/actgardner/gogen-avro/v10 v10.2.1 h1:z3pOGblRjAJCYpkIJ8CmbMJdksi4rAhaygw0dyXZ930=
+github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ=
+github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA=
+github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
+github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro=
+github.com/aws/aws-sdk-go-v2/config v1.27.10/go.mod h1:BePM7Vo4OBpHreKRUMuDXX+/+JWP38FLkzl5m27/Jjs=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.10 h1:qDZ3EA2lv1KangvQB6y258OssCHD0xvaGiEDkG4X/10=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.10/go.mod h1:6t3sucOaYDwDssHQa0ojH1RpmVmF5/jArkye1b2FKMI=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1 h1:FVJ0r5XTHSmIHJV6KuDmdYhEpvlHpiSd38RQWhut5J4=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.1/go.mod h1:zusuAeqezXzAB24LGuzuekqMAEgWkVYukBec3kr3jUg=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1xUsUr3I8cHps0G+XM3WWU16lP6yG8qu1GAZAs=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk=
+github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8=
+github.com/aws/aws-sdk-go-v2/service/sso v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4/go.mod h1:mUYPBhaF2lGiukDEjJX2BLRRKTmoUSitGDUgM4tRxak=
+github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n2HZPkcKgPAi1phU=
+github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw=
+github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q=
+github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
+github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
+github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
+github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY=
+github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE=
+github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
+github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
+github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
+github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/compose-spec/compose-go/v2 v2.1.0 h1:qdW2qISQlCQG8v1O2TChcdxgAWTUGgUX/CPSO+ES9+E=
+github.com/compose-spec/compose-go/v2 v2.1.0/go.mod h1:bEPizBkIojlQ20pi2vNluBa58tevvj0Y18oUSHPyfdc=
+github.com/confluentinc/confluent-kafka-go/v2 v2.5.0 h1:PM18lA9g6u6Qcz06DpXmGRlxXTvWlHqnlAkQi1chPUo=
+github.com/confluentinc/confluent-kafka-go/v2 v2.5.0/go.mod h1:Hyo+IIQ/tmsfkOcRP8T6VlSeOW3T33v0Me8Xvq4u90Y=
+github.com/containerd/console v1.0.4 h1:F2g4+oChYvBTsASRTz8NP6iIAi97J3TtSAsLbIFn4ro=
+github.com/containerd/console v1.0.4/go.mod h1:YynlIjWYF8myEu6sdkwKIvGQq+cOckRm6So2avqoYAk=
+github.com/containerd/containerd v1.7.15 h1:afEHXdil9iAm03BmhjzKyXnnEBtjaLJefdU7DV0IFes=
+github.com/containerd/containerd v1.7.15/go.mod h1:ISzRRTMF8EXNpJlTzyr2XMhN+j9K302C21/+cr3kUnY=
+github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8=
+github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ=
+github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
+github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
+github.com/containerd/ttrpc v1.2.3 h1:4jlhbXIGvijRtNC8F/5CpuJZ7yKOBFGFOOXg1bkISz0=
+github.com/containerd/ttrpc v1.2.3/go.mod h1:ieWsXucbb8Mj9PH0rXCw1i8IunRbbAiDkpXkbfflWBM=
+github.com/containerd/typeurl/v2 v2.1.1 h1:3Q4Pt7i8nYwy2KmQWIw2+1hTvwTE/6w9FqcttATPO/4=
+github.com/containerd/typeurl/v2 v2.1.1/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0=
+github.com/cpuguy83/dockercfg v0.3.1 h1:/FpZ+JaygUR/lZP2NlFI2DVfrOEMAIKP5wWEJdoYe9E=
+github.com/cpuguy83/dockercfg v0.3.1/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
+github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
+github.com/docker/buildx v0.14.0 h1:FxqcfE7xgeEC4oQlKLpuvfobRDVDXrHE3jByM+mdyqk=
+github.com/docker/buildx v0.14.0/go.mod h1:Vy/2lC9QsJvo33+7KKkN/GDE5WxnVqW0/dpcN7ZqPJY=
+github.com/docker/cli v26.1.0+incompatible h1:+nwRy8Ocd8cYNQ60mozDDICICD8aoFGtlPXifX/UQ3Y=
+github.com/docker/cli v26.1.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
+github.com/docker/compose/v2 v2.27.0 h1:FKyClQdErCxUZULC2zo6Jn5ve+epFPe/Y0HaxjmUzNg=
+github.com/docker/compose/v2 v2.27.0/go.mod h1:uaqwmY6haO8wXWHk+LAsqqDapX6boH4izRKqj/E7+Bo=
+github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
+github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
+github.com/docker/docker v26.1.0+incompatible h1:W1G9MPNbskA6VZWL7b3ZljTh0pXI68FpINx0GKaOdaM=
+github.com/docker/docker v26.1.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
+github.com/docker/docker-credential-helpers v0.8.0 h1:YQFtbBQb4VrpoPxhFuzEBPQ9E16qz5SpHLS+uswaCp8=
+github.com/docker/docker-credential-helpers v0.8.0/go.mod h1:UGFXcuoQ5TxPiB54nHOZ32AWRqQdECoh/Mg0AlEYb40=
+github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0=
+github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q=
+github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
+github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
+github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8=
+github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw=
+github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
+github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
+github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203 h1:XBBHcIb256gUJtLmY22n99HaZTz+r2Z51xUPi01m3wg=
+github.com/eiannone/keyboard v0.0.0-20220611211555-0d226195f203/go.mod h1:E1jcSv8FaEny+OP/5k9UxZVw9YFWGj7eI4KR/iOBqCg=
+github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
+github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
+github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
+github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
+github.com/frankban/quicktest v1.14.0 h1:+cqqvzZV87b4adx/5ayVOaYZ2CrvM4ejQvUdBzPPUss=
+github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og=
+github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvnkw=
+github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc=
+github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo=
+github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
+github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
+github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
+github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
+github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
+github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
+github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
+github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE=
+github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs=
+github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE=
+github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k=
+github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g=
+github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
+github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
+github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
+github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0=
+github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4=
+github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
+github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
+github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
+github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
+github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
+github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I=
+github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U=
+github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
+github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
+github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
+github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
+github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw=
+github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg=
+github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
+github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
+github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
+github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
+github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
+github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
+github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek=
+github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/heetch/avro v0.4.5 h1:BSnj4wEeUG1IjMTm9/tBwQnV3euuIVa1mRWHnm1t8VU=
+github.com/heetch/avro v0.4.5/go.mod h1:gxf9GnbjTXmWmqxhdNbAMcZCjpye7RV5r9t3Q0dL6ws=
+github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
+github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
+github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF/XEFBbY=
+github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE=
+github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
+github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
+github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc=
+github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
+github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
+github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg=
+github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
+github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
+github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
+github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
+github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
+github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
+github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
+github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk=
+github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
+github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
+github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
+github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
+github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
+github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU=
+github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
+github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
+github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
+github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
+github.com/moby/buildkit v0.13.1 h1:L8afOFhPq2RPJJSr/VyzbufwID7jquZVB7oFHbPRcPE=
+github.com/moby/buildkit v0.13.1/go.mod h1:aNmNQKLBFYAOFuzQjR3VA27/FijlvtBD1pjNwTSN37k=
+github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
+github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
+github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
+github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
+github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk=
+github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=
+github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8=
+github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c=
+github.com/moby/sys/mountinfo v0.7.1 h1:/tTvQaSJRr2FshkhXiIpux6fQ2Zvc4j7tAhMTStAG2g=
+github.com/moby/sys/mountinfo v0.7.1/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI=
+github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc=
+github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo=
+github.com/moby/sys/signal v0.7.0 h1:25RW3d5TnQEoKvRbEKUGay6DCQ46IxAVTT9CUMgmsSI=
+github.com/moby/sys/signal v0.7.0/go.mod h1:GQ6ObYZfqacOwTtlXvcmh9A26dVRul/hbOZn88Kg8Tg=
+github.com/moby/sys/symlink v0.2.0 h1:tk1rOM+Ljp0nFmfOIBtlV3rTDlWOwFRhjEeAhZB0nZc=
+github.com/moby/sys/symlink v0.2.0/go.mod h1:7uZVF2dqJjG/NsClqul95CqKOBRQyYSNnJ6BMgR/gFs=
+github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg=
+github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU=
+github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
+github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
+github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
+github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
+github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus=
+github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=
+github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
+github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
+github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
+github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
+github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
+github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
+github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
+github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q=
+github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY=
+github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw=
+github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI=
+github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY=
+github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY=
+github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
+github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
+github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc h1:zAsgcP8MhzAbhMnB1QQ2O7ZhWYVGYSR2iVcjzQuPV+o=
+github.com/r3labs/sse v0.0.0-20210224172625-26fe804710bc/go.mod h1:S8xSOnV3CgpNrWd0GQ/OoQfMtlg2uPRSuTzcSGrzwK8=
+github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
+github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
+github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE=
+github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs=
+github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU=
+github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc=
+github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI=
+github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE=
+github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4=
+github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM=
+github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
+github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
+github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
+github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA=
+github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
+github.com/sony/gobreaker v1.0.0 h1:feX5fGGXSl3dYd4aHZItw+FpHLvvoaqkawKjVNiFMNQ=
+github.com/sony/gobreaker v1.0.0/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY=
+github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
+github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/testcontainers/testcontainers-go v0.31.0 h1:W0VwIhcEVhRflwL9as3dhY6jXjVCA27AkmbnZ+UTh3U=
+github.com/testcontainers/testcontainers-go v0.31.0/go.mod h1:D2lAoA0zUFiSY+eAflqK5mcUx/A5hrrORaEQrd0SefI=
+github.com/testcontainers/testcontainers-go/modules/compose v0.31.0 h1:H74o3HisnApIDQx7sWibGzOl/Oo0By8DjyVeUf3qd6I=
+github.com/testcontainers/testcontainers-go/modules/compose v0.31.0/go.mod h1:z1JAsvL2/pNFy40yJX0VX9Yk+hzOCIO5DydxBJHBbCY=
+github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c=
+github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw=
+github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA=
+github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g=
+github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
+github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
+github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
+github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
+github.com/tonistiigi/fsutil v0.0.0-20240301111122-7525a1af2bb5 h1:oZS8KCqAg62sxJkEq/Ppzqrb6EooqzWtL8Oaex7bc5c=
+github.com/tonistiigi/fsutil v0.0.0-20240301111122-7525a1af2bb5/go.mod h1:vbbYqJlnswsbJqWUcJN8fKtBhnEgldDrcagTgnBVKKM=
+github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/v/cCndK0AMpt1wiVFb/YYmqB3/QG0=
+github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk=
+github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531 h1:Y/M5lygoNPKwVNLMPXgVfsRT40CSFKXCxuU8LoHySjs=
+github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc=
+github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
+github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
+github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
+github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
+github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
+github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
+github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw=
+github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
+github.com/zillow/zfmt v1.0.1 h1:JLN5WaxoqqoEPUpVWer83uhXhDPAA2nZkfQqgKnWp+w=
+github.com/zillow/zfmt v1.0.1/go.mod h1:0PpKh4rWh+5Ghr2bbuN5UvEcqEz6PkHfE0Idgjyxy7Y=
+github.com/zillow/zkafka v1.0.2 h1:xAcKfILqz5qcqcfcPYw3nXJXr9nOeXbgMfVhQNilINg=
+github.com/zillow/zkafka v1.0.2/go.mod h1:cA7XPO8b91ByfgDW6iBujB5vB98U1GP73Vraq0z6WRw=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
+go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1 h1:gbhw/u49SS3gkPWiYweQNJGm/uJN5GkI/FrosxSHT7A=
+go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1/go.mod h1:GnOaBaFQ2we3b9AGWJpsBa7v1S5RlQzlC3O7dRMxZhM=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
+go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo=
+go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0 h1:ZtfnDL+tUrs1F0Pzfwbg2d59Gru9NCH3bgSHBM6LDwU=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.42.0/go.mod h1:hG4Fj/y8TR/tlEDREo8tWstl9fO9gcFkn4xrx0Io8xU=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0 h1:NmnYCiR0qNufkldjVvyQfZTHSdzeHoZ41zggMsdMcLM=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v0.42.0/go.mod h1:UVAO61+umUsHLtYb8KXXRoHtxUkdOPkYidzW3gipRLQ=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0 h1:wNMDy/LVGLj2h3p6zg4d0gypKfWKSWI14E1C4smOgl8=
+go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v0.42.0/go.mod h1:YfbDdXAAkemWJK3H/DshvlrxqFB2rtW4rY6ky/3x/H0=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0 h1:cl5P5/GIfFh4t6xyruOgJP5QiA1pw4fYYdv6nc6CBWw=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.21.0/go.mod h1:zgBdWWAu7oEEMC06MMKc5NLbA/1YDXV1sMpSqEeLQLg=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0 h1:tIqheXEFWAZ7O8A7m+J0aPTmpJN3YQ7qetUAdkkkKpk=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.21.0/go.mod h1:nUeKExfxAQVbiVFn32YXpXZZHZ61Cc3s3Rn1pDBGAb0=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 h1:digkEZCJWobwBqMwC0cwCq8/wkkRy/OowZg5OArWZrM=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0/go.mod h1:/OpE/y70qVkndM0TrxT4KBoN3RsFZP0QaofcfYrj76I=
+go.opentelemetry.io/otel/exporters/prometheus v0.42.0 h1:jwV9iQdvp38fxXi8ZC+lNpxjK16MRcZlpDYvbuO1FiA=
+go.opentelemetry.io/otel/exporters/prometheus v0.42.0/go.mod h1:f3bYiqNqhoPxkvI2LrXqQVC546K7BuRDL/kKuxkujhA=
+go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q=
+go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s=
+go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw=
+go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg=
+go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0=
+go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q=
+go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g=
+go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI=
+go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
+go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
+go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU=
+go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc=
+golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
+golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
+golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o=
+golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
+golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
+golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
+golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ=
+golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA=
+golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
+golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
+golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q=
+golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
+golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
+google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
+google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
+google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAUOx8YmR5T7rc0rdzK8DyxM8cQ9zq0=
+google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa/go.mod h1:CnZenrTdRJb7jc+jOm0Rkywq+9wh0QC4U8tyiRbEPPM=
+google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237 h1:RFiFrvy37/mpSpdySBDrUdipW/dHwsRwh3J3+A9VgT4=
+google.golang.org/genproto/googleapis/api v0.0.0-20240318140521-94a12d6c2237/go.mod h1:Z5Iiy3jtmioajWHDGFk7CeugTyHtPvMHA4UTmUkyalE=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237 h1:NnYq6UN9ReLM9/Y01KWNOWyI5xQ9kbIms5GGJVwS/Yc=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240318140521-94a12d6c2237/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
+google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk=
+google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
+google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
+google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
+gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y=
+gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI=
+gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
+gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+k8s.io/api v0.29.2 h1:hBC7B9+MU+ptchxEqTNW2DkUosJpp1P+Wn6YncZ474A=
+k8s.io/api v0.29.2/go.mod h1:sdIaaKuU7P44aoyyLlikSLayT6Vb7bvJNCX105xZXY0=
+k8s.io/apimachinery v0.29.2 h1:EWGpfJ856oj11C52NRCHuU7rFDwxev48z+6DSlGNsV8=
+k8s.io/apimachinery v0.29.2/go.mod h1:6HVkd1FwxIagpYrHSwJlQqZI3G9LfYWRPAkUvLnXTKU=
+k8s.io/apiserver v0.29.2 h1:+Z9S0dSNr+CjnVXQePG8TcBWHr3Q7BmAr7NraHvsMiQ=
+k8s.io/apiserver v0.29.2/go.mod h1:B0LieKVoyU7ykQvPFm7XSdIHaCHSzCzQWPFa5bqbeMQ=
+k8s.io/client-go v0.29.2 h1:FEg85el1TeZp+/vYJM7hkDlSTFZ+c5nnK44DJ4FyoRg=
+k8s.io/client-go v0.29.2/go.mod h1:knlvFZE58VpqbQpJNbCbctTVXcd35mMyAAwBdpt4jrA=
+k8s.io/klog/v2 v2.110.1 h1:U/Af64HJf7FcwMcXyKm2RPM22WZzyR7OSpYj5tg3cL0=
+k8s.io/klog/v2 v2.110.1/go.mod h1:YGtd1984u+GgbuZ7e08/yBuAfKLSO0+uR1Fhi6ExXjo=
+k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 h1:aVUu9fTY98ivBPKR9Y5w/AuzbMm96cd3YHRTU83I780=
+k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00/go.mod h1:AsvuZPBlUDVuCdzJ87iajxtXuR9oktsTctW/R9wwouA=
+k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI=
+k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
+sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo=
+sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0=
+sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4=
+sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08=
+sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
+sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=
+tags.cncf.io/container-device-interface v0.6.2 h1:dThE6dtp/93ZDGhqaED2Pu374SOeUkBfuvkLuiTdwzg=
+tags.cncf.io/container-device-interface v0.6.2/go.mod h1:Shusyhjs1A5Na/kqPVLL0KqnHQHuunol9LFeUNkuGVE=
diff --git a/example/worker_avro/main.go b/example/worker_avro/main.go
new file mode 100644
index 0000000..0264cd6
--- /dev/null
+++ b/example/worker_avro/main.go
@@ -0,0 +1,97 @@
+package main
+
+import (
+	"context"
+	_ "embed"
+	"log"
+	"os"
+	"os/signal"
+	"syscall"
+	"time"
+
+	"github.com/google/uuid"
+	"github.com/zillow/zkafka"
+)
+
+//go:embed dummy_event.avsc
+var dummyEventSchema string
+
+// Demonstrates reading from a topic via the zkafka.Work struct, which is typically more convenient than using the consumer directly
+func main() {
+	ctx := context.Background()
+	client := zkafka.NewClient(zkafka.Config{
+		BootstrapServers: []string{"localhost:29092"},
+	},
+		// optionally add a logger, which implements zkafka.Logger, to see detailed information about message processing
+		//zkafka.LoggerOption(),
+	)
+	// It's important to close the client after consumption to gracefully leave the consumer group
+	// (this commits completed work, and informs the broker that this consumer is leaving the group, which yields a faster rebalance)
+	defer client.Close()
+
+	topicConfig := zkafka.ConsumerTopicConfig{
+		// ClientID is used for caching inside zkafka, and observability within streamz dashboards. But it's not an important
+		// part of consumer group semantics. A typical convention is to use the service name executing the kafka worker
+		ClientID: "service-name",
+		// GroupID is the consumer group. If multiple instances of the same consumer group read messages from the same
+		// topic, the topic's partitions are split among those instances. The broker remembers
+		// which offset has been committed for a consumer group, and therefore work can be picked up where it was left off
+		// across releases
+		GroupID: uuid.NewString(),
+		//GroupID: "zkafka/example/example-consumer",
+		Topic: "zkafka-example-topic",
+		// The formatter is registered internally to the `zkafka.Message` and used when calling `msg.Decode()`.
+		// string fmt can be used for both binary and pure strings encoded in the value field of the kafka message. Other options include
+		// json, proto, avro, etc.
+		Formatter: zkafka.AvroConfluentFmt,
+		SchemaRegistry: zkafka.SchemaRegistryConfig{
+			URL:             "http://localhost:8081",
+			Deserialization: struct{}{},
+		},
+		AdditionalProps: map[string]any{
+			// only matters the first time a consumer group connects; subsequent connections
+			// resume from the committed offset
+			"auto.offset.reset": "earliest",
+		},
+	}
+	// optionally set up a channel to signal when worker shutdown should occur.
+	// A nil channel is also acceptable, but this example demonstrates how to make use of the signal.
+	// The channel should be closed, instead of simply written to, to properly broadcast to the separate worker threads.
+	stopCh := make(chan os.Signal, 1)
+	signal.Notify(stopCh, os.Interrupt, syscall.SIGTERM)
+	shutdown := make(chan struct{})
+
+	go func() {
+		<-stopCh
+		close(shutdown)
+	}()
+
+	wf := zkafka.NewWorkFactory(client)
+	// Register a processor which is executed per message.
+	// Speedup is used to create multiple processor goroutines. Order is still maintained with this setup by way of `virtual partitions`
+	work := wf.Create(topicConfig, &Processor{}, zkafka.Speedup(1))
+	if err := work.Run(ctx, shutdown); err != nil {
+		log.Panic(err)
+	}
+}
+
+type Processor struct{}
+
+func (p Processor) Process(_ context.Context, msg *zkafka.Message) error {
+	// sleep to simulate a random amount of work
+	time.Sleep(100 * time.Millisecond)
+	event := DummyEvent{}
+	err := msg.Decode(&event)
+	if err != nil {
+		log.Printf("error occurred during processing: %s", err)
+		return err
+	}
+	// optionally, if you don't want to use the configured formatter at all, access the kafka message payload bytes directly.
+	// The commented out block shows accessing the byte array. In this case we're stringifying the bytes, but this could be json unmarshalled,
+	// proto unmarshalled etc., depending on the expected payload
+	//data := msg.Value()
+	//str := string(data)
+
+	log.Printf("offset: %d, partition: %d, event.IntField: %d, event.StringField: %s\n", msg.Offset, msg.Partition, event.IntField, event.StringField)
+	return nil
+}
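The worker relies on the schema for zkafka-example-topic already existing in the registry (the producer auto-registers it on first write). If the worker can start first, the schema can be pre-registered explicitly. A minimal sketch using confluent-kafka-go's schemaregistry client — the subject name follows the default TopicNameStrategy ("<topic>-value"), and registerSchema is an illustrative helper, not part of zkafka:

import "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"

// registerSchema registers (or looks up) the schema under the given subject and
// returns its ID, e.g.:
//   id, err := registerSchema("http://localhost:8081", "zkafka-example-topic-value", dummyEventSchema)
func registerSchema(url, subject, schema string) (int, error) {
	client, err := schemaregistry.NewClient(schemaregistry.NewConfig(url))
	if err != nil {
		return 0, err
	}
	// normalize=true mirrors the NormalizeSchemas behavior used by the serializer below.
	return client.Register(subject, schemaregistry.SchemaInfo{Schema: schema}, true)
}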
diff --git a/formatter.go b/formatter.go
index ac08af4..29e4a18 100644
--- a/formatter.go
+++ b/formatter.go
@@ -2,13 +2,18 @@ package zkafka

 import (
 	"errors"
+	"fmt"
+
+	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
+	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde"
+	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2"
 	"github.com/zillow/zfmt"
 )

 const (
 	// CustomFmt indicates that the user would pass in their own Formatter later
-	CustomFmt zfmt.FormatterType = "custom"
+	CustomFmt        zfmt.FormatterType = "custom"
+	AvroConfluentFmt zfmt.FormatterType = "avro_confluent"
 )

 var errMissingFmtter = errors.New("custom formatter is missing, did you forget to call WithFormatter()")
@@ -19,16 +24,102 @@ type Formatter interface {
 	Unmarshal(b []byte, v any) error
 }

-// noopFormatter is a formatter that returns error when called. The error will remind the user
+type confluentFormatter interface {
+	Marshall(topic string, v any, schema string) ([]byte, error)
+	Unmarshal(topic string, b []byte, v any) error
+}
+
+func getFormatter(formatter zfmt.FormatterType, schemaID int, srCfg SchemaRegistryConfig) (Formatter, confluentFormatter, error) {
+	switch formatter {
+	case AvroConfluentFmt:
+		cf, err := newAvroSchemaRegistryFormatter(srCfg)
+		return nil, cf, err
+	case CustomFmt:
+		return &errFormatter{}, nil, nil
+	default:
+		f, err := zfmt.GetFormatter(formatter, schemaID)
+		if err != nil {
+			return nil, nil, fmt.Errorf("unsupported formatter %s", formatter)
+		}
+		return f, nil, nil
+	}
+}
+
+// errFormatter is a formatter that returns error when called. The error will remind the user
 // to provide appropriate implementation
-type noopFormatter struct{}
+type errFormatter struct{}

 // Marshall returns error with reminder
-func (f noopFormatter) Marshall(_ any) ([]byte, error) {
+func (f errFormatter) Marshall(_ any) ([]byte, error) {
 	return nil, errMissingFmtter
 }

 // Unmarshal returns error with reminder
-func (f noopFormatter) Unmarshal(_ []byte, _ any) error {
+func (f errFormatter) Unmarshal(_ []byte, _ any) error {
 	return errMissingFmtter
 }
+
+var _ confluentFormatter = (*avroSchemaRegistryFormatter)(nil)
+
+type avroSchemaRegistryFormatter struct {
+	deser *avrov2.Deserializer
+	ser   *avrov2.Serializer
+	f     zfmt.SchematizedAvroFormatter
+}
+
+func newAvroSchemaRegistryFormatter(srConfig SchemaRegistryConfig) (avroSchemaRegistryFormatter, error) {
+	url := srConfig.URL
+	if url == "" {
+		return avroSchemaRegistryFormatter{}, errors.New("no schema registry url provided")
+	}
+	client, err := schemaregistry.NewClient(schemaregistry.NewConfig(url))
+	if err != nil {
+		return avroSchemaRegistryFormatter{}, fmt.Errorf("failed to create schema registry client: %w", err)
+	}
+
+	deserConfig := avrov2.NewDeserializerConfig()
+	deser, err := avrov2.NewDeserializer(client, serde.ValueSerde, deserConfig)
+	if err != nil {
+		return avroSchemaRegistryFormatter{}, fmt.Errorf("failed to create deserializer: %w", err)
+	}
+
+	serConfig := avrov2.NewSerializerConfig()
+	serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas
+	serConfig.NormalizeSchemas = true
+
+	ser, err := avrov2.NewSerializer(client, serde.ValueSerde, serConfig)
+	if err != nil {
+		return avroSchemaRegistryFormatter{}, fmt.Errorf("failed to create serializer: %w", err)
+	}
+	return avroSchemaRegistryFormatter{
+		deser: deser,
+		ser:   ser,
+	}, nil
+}
+
+func (f avroSchemaRegistryFormatter) Marshall(topic string, target any, avroSchema string) ([]byte, error) {
+	if avroSchema != "" {
+		info := schemaregistry.SchemaInfo{
+			Schema: avroSchema,
+		}
+		id, err := f.ser.GetID(topic, nil, &info)
+		if err != nil {
+			return nil, fmt.Errorf("failed to get schema ID for avro schema: %w", err)
+		}
+		f.f.SchemaID = id
+		return f.f.Marshall(target)
+	}
+	value, err := f.ser.Serialize(topic, target)
+	if err != nil {
+		return nil, fmt.Errorf("failed to serialize confluent schema registry avro type: %w", err)
+	}
+	return value, nil
+}
+
+func (f avroSchemaRegistryFormatter) Unmarshal(topic string, value []byte, target any) error {
+	err := f.deser.DeserializeInto(topic, value, &target)
+	if err != nil {
+		return fmt.Errorf("failed to deserialize to confluent schema registry avro type: %w", err)
+	}
+	return nil
+}
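As a quick sanity check of the formatter in isolation, a round trip from inside the package might look like the following — an integration-style sketch that assumes a reachable registry at localhost:8081 and uses an illustrative record type; it is not part of this change:

package zkafka

import "testing"

const dummySchema = `{
  "type": "record",
  "name": "Dummy",
  "fields": [
    {"name": "IntField", "type": "int"},
    {"name": "StringField", "type": "string"}
  ]
}`

type dummy struct {
	IntField    int    `avro:"IntField"`
	StringField string `avro:"StringField"`
}

// Round-trip sketch for the new formatter. It requires a running schema
// registry, so it is an integration-style test, not a unit test.
func TestAvroSchemaRegistryFormatter_RoundTrip(t *testing.T) {
	f, err := newAvroSchemaRegistryFormatter(SchemaRegistryConfig{
		URL:           "http://localhost:8081",
		Serialization: struct{ AutoRegisterSchemas bool }{AutoRegisterSchemas: true},
	})
	if err != nil {
		t.Fatal(err)
	}
	in := dummy{IntField: 7, StringField: "hello"}
	// Marshall resolves (or registers) the schema for the topic's subject and
	// frames the Avro bytes with the returned schema ID.
	b, err := f.Marshall("zkafka-example-topic", in, dummySchema)
	if err != nil {
		t.Fatal(err)
	}
	var out dummy
	// Unmarshal reads the schema ID from the frame and decodes into out.
	if err := f.Unmarshal("zkafka-example-topic", b, &out); err != nil {
		t.Fatal(err)
	}
	if out != in {
		t.Fatalf("got %+v, want %+v", out, in)
	}
}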
diff --git a/formatter_test.go b/formatter_test.go
index d9e6aa8..a842129 100644
--- a/formatter_test.go
+++ b/formatter_test.go
@@ -8,7 +8,7 @@ import (
 
 func TestNoopFormatter_Marshall_Unmarshal(t *testing.T) {
 	defer recoverThenFail(t)
-	fmtter := noopFormatter{}
+	fmtter := errFormatter{}
 
 	_, err := fmtter.Marshall("anything")
 	require.ErrorIs(t, err, errMissingFmtter)
diff --git a/go.mod b/go.mod
index af79f0d..d5a9ec6 100644
--- a/go.mod
+++ b/go.mod
@@ -21,7 +21,12 @@ require (
 	github.com/go-logr/logr v1.4.2 // indirect
 	github.com/go-logr/stdr v1.2.2 // indirect
 	github.com/golang/protobuf v1.5.4 // indirect
+	github.com/hamba/avro/v2 v2.20.1 // indirect
 	github.com/heetch/avro v0.4.5 // indirect
+	github.com/json-iterator/go v1.1.12 // indirect
+	github.com/mitchellh/mapstructure v1.5.0 // indirect
+	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+	github.com/modern-go/reflect2 v1.0.2 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	go.opentelemetry.io/otel/metric v1.28.0 // indirect
 	google.golang.org/protobuf v1.34.2 // indirect
diff --git a/go.sum b/go.sum
index 436b46a..1e0f77a 100644
--- a/go.sum
+++ b/go.sum
@@ -1,11 +1,27 @@
+cloud.google.com/go/compute v1.25.1 h1:ZRpHJedLtTpKgr3RV1Fx23NuaAEN1Zfx9hw1u4aJdjU=
+cloud.google.com/go/compute v1.25.1/go.mod h1:oopOIR53ly6viBYxaDhBfJwzUAxf1zE//uf3IB011ls=
+cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
+cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
 dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
 dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
 github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU=
 github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
 github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ=
 github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 h1:n1DH8TPV4qqPTje2RcUBYwtrTWlabVp4n46+74X2pn4=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0/go.mod h1:HDcZnuGbiyppErN6lB+idp4CKhjbc8gwjto6OPpyggM=
+github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ=
+github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo=
+github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ=
+github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc=
+github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0 h1:DRiANoJTiW6obBQe3SqZizkuV1PEgfiiGivmVocDy64=
+github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0/go.mod h1:qLIye2hwb/ZouqhpSD9Zn3SJipvpEnz1Ywl3VUk9Y0s=
+github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80=
+github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI=
 github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
 github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
+github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA=
+github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
 github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
 github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
 github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
@@ -16,6 +32,8 @@ github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpH
 github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
 github.com/actgardner/gogen-avro/v10 v10.2.1 h1:z3pOGblRjAJCYpkIJ8CmbMJdksi4rAhaygw0dyXZ930=
 github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ=
+github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI=
+github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g=
 github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA=
 github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
 github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro=
@@ -34,6 +52,8 @@ github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1x
 github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg=
 github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo=
 github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk=
+github.com/aws/aws-sdk-go-v2/service/kms v1.30.1 h1:SBn4I0fJXF9FYOVRSVMWuhvEKoAHDikjGpS3wlmw5DE=
+github.com/aws/aws-sdk-go-v2/service/kms v1.30.1/go.mod h1:2snWQJQUKsbN66vAawJuOGX7dr37pfOq9hb0tZDGIqQ=
 github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8=
 github.com/aws/aws-sdk-go-v2/service/sso v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM=
 github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE=
 github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4/go.mod h1:mUYPBhaF2lGiukDEjJX2BLRRKTmoUSitGDUgM4tRxak=
@@ -46,6 +66,8 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
 github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY=
 github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE=
+github.com/cenkalti/backoff/v3 v3.0.0 h1:ske+9nBpD9qZsTBoF41nW5L+AIuFBKMeze18XQ3eG1c=
+github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs=
 github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
 github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
 github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
@@ -105,6 +127,8 @@ github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvn
 github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc=
 github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo=
 github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0=
+github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k=
+github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ=
 github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
 github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
 github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@@ -118,28 +142,43 @@ github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2Kv
 github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k=
 github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g=
 github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
+github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
+github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
 github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
 github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
 github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0=
 github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4=
 github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
 github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw=
+github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
 github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
 github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
 github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
 github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
 github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/google/cel-go v0.20.1 h1:nDx9r8S3L4pE61eDdt8igGj8rf5kjYR3ILxWIpWNi84=
+github.com/google/cel-go v0.20.1/go.mod h1:kWcIzTsPX0zmQ+H3TirHstLLf9ep5QTsZBN9u4dOYLg=
 github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I=
 github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U=
 github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
 github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
 github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
+github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
 github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
 github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
 github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
 github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
+github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
+github.com/googleapis/gax-go/v2 v2.12.2 h1:mhN09QQW1jEWeMF74zGR81R30z4VJzjZsfkUhuHF+DA=
+github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc=
 github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
 github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
 github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
@@ -148,14 +187,30 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaW
 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y=
 github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms=
 github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg=
+github.com/hamba/avro/v2 v2.20.1 h1:3WByQiVn7wT7d27WQq6pvBRC00FVOrniP6u67FLA/2E=
+github.com/hamba/avro/v2 v2.20.1/go.mod h1:xHiKXbISpb3Ovc809XdzWow+XGTn+Oyf/F9aZbTLAig=
 github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
 github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
 github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
 github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
 github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
 github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
+github.com/hashicorp/go-retryablehttp v0.7.5 h1:bJj+Pj19UZMIweq/iie+1u5YCdGrnxCT9yvm0e+Nd5M=
+github.com/hashicorp/go-retryablehttp v0.7.5/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8=
+github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
+github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
+github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6 h1:om4Al8Oy7kCm/B86rLCLah4Dt5Aa0Fr5rYBG60OzwHQ=
+github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8=
+github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9CdjCtrXrXGuOpxEA7Ts=
+github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4=
+github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc=
+github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A=
 github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek=
 github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
+github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
+github.com/hashicorp/vault/api v1.12.1 h1:WzGN4X5jrJdNO39g6Sa55djNio3I9DxEBOTmCZE7tm0=
+github.com/hashicorp/vault/api v1.12.1/go.mod h1:1pqP/sErScodde+ybJCyP+ONC4jzEg7Dmawg/QLWo1k=
 github.com/heetch/avro v0.4.5 h1:BSnj4wEeUG1IjMTm9/tBwQnV3euuIVa1mRWHnm1t8VU=
 github.com/heetch/avro v0.4.5/go.mod h1:gxf9GnbjTXmWmqxhdNbAMcZCjpye7RV5r9t3Q0dL6ws=
 github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
@@ -178,6 +233,8 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
 github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
+github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
 github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
 github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
 github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
@@ -200,6 +257,8 @@ github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU=
 github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
 github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
 github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
+github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
+github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
 github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
 github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
 github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
@@ -226,6 +285,7 @@ github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg=
 github.com/moby/sys/user v0.1.0/go.mod
h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= @@ -242,6 +302,8 @@ github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQ github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -262,6 +324,8 @@ github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= +github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE= github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs= github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU= @@ -272,6 +336,8 @@ github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11 github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= +github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= @@ -282,6 +348,8 @@ github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= 
+github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= @@ -294,6 +362,12 @@ github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4D github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA= github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0 h1:A/2tIdYXqUuVZeWy0Yq/PWKsXgebzMyh5mLbpNEMVUo= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0/go.mod h1:QXPc/i5yUEWWZ4lbe2WOam1kDdrXjGHRjl0Lzo7IQDU= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0 h1:REG5YX2omhgPmiIT7GLqmzWFnIksZsog1FHJ+Pi1xJE= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0/go.mod h1:OJLS+EYJo/BTViJj7EBG5deKLeQfYwVNW8HMS1qHAAo= +github.com/tink-crypto/tink-go/v2 v2.1.0 h1:QXFBguwMwTIaU17EgZpEJWsUSc60b1BAGTzBIoMdmok= +github.com/tink-crypto/tink-go/v2 v2.1.0/go.mod h1:y1TnYFt1i2eZVfx4OGc+C+EMp4CoKWAw2VSEuoicHHI= github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= @@ -310,11 +384,15 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiatechs/jsonata-go v1.8.5 h1:m1NaokPKD6LPaTPRl674EQz5mpkJvM3ymjdReDEP6/A= +github.com/xiatechs/jsonata-go v1.8.5/go.mod h1:yGEvviiftcdVfhSRhRSpgyTel89T58f+690iB0fp2Vk= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/zillow/zfmt v1.0.1 h1:JLN5WaxoqqoEPUpVWer83uhXhDPAA2nZkfQqgKnWp+w= github.com/zillow/zfmt v1.0.1/go.mod h1:0PpKh4rWh+5Ghr2bbuN5UvEcqEz6PkHfE0Idgjyxy7Y= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1 h1:gbhw/u49SS3gkPWiYweQNJGm/uJN5GkI/FrosxSHT7A= @@ -389,6 +467,8 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors 
v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/api v0.169.0 h1:QwWPy71FgMWqJN/l6jVlFHUa29a7dcUy02I8o799nPY=
+google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg=
 google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
 google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
 google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAUOx8YmR5T7rc0rdzK8DyxM8cQ9zq0=
diff --git a/message.go b/message.go
index a78f638..df8c512 100644
--- a/message.go
+++ b/message.go
@@ -15,18 +15,19 @@ type Message struct {
 	Key string
 	// There's a difference between a nil key and an empty key. A nil key gets assigned a topic partition by kafka via round-robin.
 	// An empty key is treated as a key with a value of "" and is assigned to a topic partition via the hash of the key (so it will consistently go to the same partition)
-	isKeyNil       bool
-	Headers        map[string][]byte
-	Offset         int64
-	Partition      int32
-	Topic          string
-	GroupID        string
-	TimeStamp      time.Time
-	value          []byte
-	topicPartition kafka.TopicPartition
-	fmt            zfmt.Formatter
-	doneFunc       func(ctx context.Context)
-	doneOnce       sync.Once
+	isKeyNil           bool
+	Headers            map[string][]byte
+	Offset             int64
+	Partition          int32
+	Topic              string
+	GroupID            string
+	TimeStamp          time.Time
+	value              []byte
+	topicPartition     kafka.TopicPartition
+	fmt                zfmt.Formatter
+	confluentFormatter confluentFormatter
+	doneFunc           func(ctx context.Context)
+	doneOnce           sync.Once
 }
 
 // DoneWithContext is used to alert that message processing has completed.
@@ -53,12 +54,17 @@ func (m *Message) Decode(v any) error {
 	if m.value == nil {
 		return errors.New("message is empty")
 	}
-	if m.fmt == nil {
-		// is error is most likely due to user calling KReader/KWriter
-		// with custom Formatter which can sometimes be nil
-		return errors.New("formatter is not set")
+	return m.unmarshall(v)
+}
+
+func (m *Message) unmarshall(target any) error {
+	if m.fmt != nil {
+		return m.fmt.Unmarshal(m.value, target)
+	}
+	if m.confluentFormatter != nil {
+		return m.confluentFormatter.Unmarshal(m.Topic, m.value, target)
 	}
-	return m.fmt.Unmarshal(m.value, v)
+	return errors.New("formatter or confluent formatter is not supplied to decode kafka message")
 }
 
 // Value returns a copy of the current value byte array.
Useful for debugging diff --git a/reader.go b/reader.go index 6750dc3..c1f9ced 100644 --- a/reader.go +++ b/reader.go @@ -43,7 +43,9 @@ type KReader struct { topicConfig ConsumerTopicConfig isClosed bool - fmtter Formatter + fmtter Formatter + confluentFormatter confluentFormatter + logger Logger lifecycle LifecycleHooks once sync.Once @@ -58,16 +60,18 @@ func newReader(conf Config, topicConfig ConsumerTopicConfig, provider confluentC return nil, err } - fmtter, err := getFormatter(topicConfig) + formatter, cFormatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry) if err != nil { return nil, err } + return &KReader{ - consumer: consumer, - fmtter: fmtter, - topicConfig: topicConfig, - logger: logger, - tCommitMgr: newTopicCommitMgr(), + consumer: consumer, + fmtter: formatter, + confluentFormatter: cFormatter, + topicConfig: topicConfig, + logger: logger, + tCommitMgr: newTopicCommitMgr(), }, nil } @@ -197,8 +201,9 @@ func (r *KReader) mapMessage(_ context.Context, msg kafka.Message) *Message { r.logger.Errorw(ctx, "Error storing offsets", "topicName", topicName, "groupID", r.topicConfig.GroupID, "partition", partition, "offset", offset, "error", err) } }, - value: msg.Value, - fmt: r.fmtter, + value: msg.Value, + fmt: r.fmtter, + confluentFormatter: r.confluentFormatter, } } diff --git a/writer.go b/writer.go index 3558f91..75d9d93 100644 --- a/writer.go +++ b/writer.go @@ -47,6 +47,7 @@ type KWriter struct { producer KafkaProducer topicConfig ProducerTopicConfig fmtter Formatter + cFormatter confluentFormatter logger Logger tracer trace.Tracer p propagation.TextMapPropagator @@ -65,13 +66,15 @@ func newWriter(conf Config, topicConfig ProducerTopicConfig, producer confluentP if err != nil { return nil, err } - fmtter, err := getFormatter(topicConfig) + formatter, cFormatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry) if err != nil { return nil, err } + return &KWriter{ producer: p, - fmtter: fmtter, + fmtter: formatter, + cFormatter: cFormatter, topicConfig: topicConfig, logger: NoopLogger{}, }, nil @@ -191,17 +194,32 @@ func (w *KWriter) startSpan(ctx context.Context, msg *kafka.Message) spanWrapper } func (w *KWriter) write(ctx context.Context, msg keyValuePair, opts ...WriteOption) (Response, error) { - if w.fmtter == nil { - return Response{}, errors.New("formatter is not supplied to produce kafka message") + s := writeSettings{} + for _, opt := range opts { + opt2, ok := opt.(WriteOption2) + if !ok { + continue + } + opt2.applySettings(&s) } - value, err := w.fmtter.Marshall(msg.value) + value, err := w.marshall(ctx, msg.value, s.avroSchema) if err != nil { - return Response{}, fmt.Errorf("failed to marshall producer message: %w", err) + return Response{}, err } return w.WriteRaw(ctx, msg.key, value, opts...) 
} +func (w *KWriter) marshall(_ context.Context, value any, schema string) ([]byte, error) { + if w.fmtter != nil { + return w.fmtter.Marshall(value) + } + if w.cFormatter != nil { + return w.cFormatter.Marshall(w.topicConfig.Topic, value, schema) + } + return nil, errors.New("formatter or confluent formatter is not supplied to produce kafka message") +} + // Close terminates the writer gracefully and mark it as closed func (w *KWriter) Close() { w.mu.Lock() @@ -227,9 +245,19 @@ type WriteOption interface { apply(s *kafka.Message) } +type writeSettings struct { + avroSchema string +} + +type WriteOption2 interface { + applySettings(s *writeSettings) +} + // WithHeaders allows for the specification of headers. Specified headers will override collisions. func WithHeaders(headers map[string]string) WriteOption { return withHeaderOption{headers: headers} } +func WithAvroSchema(schema string) WriteOption { return withAvroSchema{schema: schema} } + type withHeaderOption struct { headers map[string]string } @@ -252,3 +280,15 @@ func (o withHeaderOption) apply(s *kafka.Message) { updateHeaders(k, v) } } + +var _ WriteOption2 = (*withAvroSchema)(nil) + +type withAvroSchema struct { + schema string +} + +func (o withAvroSchema) apply(s *kafka.Message) {} + +func (o withAvroSchema) applySettings(s *writeSettings) { + s.avroSchema = o.schema +} From e7a63c09afdc9b41fabd438e1cac1123e6e006a4 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Mon, 9 Sep 2024 09:22:02 -0700 Subject: [PATCH 02/34] Updated --- .github/workflows/go.yml | 1 + Makefile | 7 +- coverage.sh | 8 +- example/worker_avro/bla.go | 12 -- example/worker_avro/dummy_event_gen.go | 29 +++++ formatter.go | 27 ---- test/dummy_event_1.avsc | 11 ++ test/dummy_event_2.avsc | 12 ++ test/heetch1/dummy_event_1_gen.go | 29 +++++ test/heetch2/dummy_event_2_gen.go | 30 +++++ test/integration_test.go | 21 ++- test/schema_registry_test.go | 173 +++++++++++++++++++++++++ 12 files changed, 310 insertions(+), 50 deletions(-) delete mode 100644 example/worker_avro/bla.go create mode 100644 example/worker_avro/dummy_event_gen.go create mode 100644 test/dummy_event_1.avsc create mode 100644 test/dummy_event_2.avsc create mode 100644 test/heetch1/dummy_event_1_gen.go create mode 100644 test/heetch2/dummy_event_2_gen.go create mode 100644 test/schema_registry_test.go diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index b2c7d15..9a142b7 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -41,6 +41,7 @@ jobs: - name: Test env: KAFKA_BOOTSTRAP_SERVER: ${{ env.kafka_runner_address }}:9092 + ENABLE_KAFKA_BROKER_TESTS: true run: make cover - name: Upload coverage reports to Codecov diff --git a/Makefile b/Makefile index aa300e0..266b2a3 100644 --- a/Makefile +++ b/Makefile @@ -10,7 +10,7 @@ setup: # Assumes setup has been executed. Runs go test with coverage .PHONY: cover cover: - export GO_TAGS=--tags=integration; ./coverage.sh + ./coverage.sh # Runs setup and executes tests with coverage. 
.PHONY: test-local @@ -45,5 +45,8 @@ golangci-lint: .PHONY: gen gen: go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p main -d ./example/producer_avro ./example/producer_avro/dummy_event.avsc + go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p main -d ./example/worker_avro ./example/worker_avro/dummy_event.avsc + go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p heetch1 -d ./test/heetch1 ./test/dummy_event_1.avsc + go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p heetch2 -d ./test/heetch2 ./test/dummy_event_2.avsc #go run github.com/hamba/avro/v2/cmd/avrogen@v2.25.1 -pkg main -o ./example/producer_avro/bla.go -tags json:snake,yaml:upper-camel ./example/producer_avro/dummy_event.avsc - go run github.com/hamba/avro/v2/cmd/avrogen@v2.25.1 -pkg main -o ./example/worker_avro/bla.go -tags json:snake,yaml:upper-camel ./example/worker_avro/dummy_event.avsc + #go run github.com/hamba/avro/v2/cmd/avrogen@v2.25.1 -pkg main -o ./example/worker_avro/bla.go -tags json:snake,yaml:upper-camel ./example/worker_avro/dummy_event.avsc diff --git a/coverage.sh b/coverage.sh index ee8252c..9948763 100755 --- a/coverage.sh +++ b/coverage.sh @@ -1,10 +1,6 @@ #!/usr/bin/env bash set -x -# allows for GO test args to be passed in (Specifically added to control whether or not to pass in `--tags=integration`). -go_tags=$GO_TAGS -go_tags="${go_tags:---tags=unit}" - # golang packages that will be used for either testing or will be assessed for coverage pck1=github.com/zillow/zkafka pck2=$pck1/test @@ -27,11 +23,11 @@ function quit() { } # change to example directory for execution (because it uses hardcoded filepaths, and the testable # examples don't work when executed outside of that directory -go test $go_tags -c -coverpkg=$pck1 -covermode=atomic -o "$root_res" $pck1 +go test -c -coverpkg=$pck1 -covermode=atomic -o "$root_res" $pck1 # convert binary to go formatted go tool test2json -t "$root_res" -test.v -test.coverprofile "$root_out" -go test $go_tags -c -coverpkg=$pck1 -covermode=atomic -o "$source_res" $pck2 +go test -c -coverpkg=$pck1 -covermode=atomic -o "$source_res" $pck2 go tool test2json -t "$source_res" -test.v -test.coverprofile "$source_out" # delete aggregate file diff --git a/example/worker_avro/bla.go b/example/worker_avro/bla.go deleted file mode 100644 index c5f212b..0000000 --- a/example/worker_avro/bla.go +++ /dev/null @@ -1,12 +0,0 @@ -package main - -// Code generated by avro/gen. DO NOT EDIT. - -// DummyEvent is a generated struct. -type DummyEvent struct { - IntField int `avro:"IntField" json:"int_field" yaml:"IntField"` - DoubleField float64 `avro:"DoubleField" json:"double_field" yaml:"DoubleField"` - StringField string `avro:"StringField" json:"string_field" yaml:"StringField"` - BoolField bool `avro:"BoolField" json:"bool_field" yaml:"BoolField"` - BytesField []byte `avro:"BytesField" json:"bytes_field" yaml:"BytesField"` -} diff --git a/example/worker_avro/dummy_event_gen.go b/example/worker_avro/dummy_event_gen.go new file mode 100644 index 0000000..e5245e1 --- /dev/null +++ b/example/worker_avro/dummy_event_gen.go @@ -0,0 +1,29 @@ +// Code generated by avrogen. DO NOT EDIT. + +package main + +import ( + "github.com/heetch/avro/avrotypegen" +) + +type DummyEvent struct { + IntField int + DoubleField float64 + StringField string + BoolField bool + BytesField []byte +} + +// AvroRecord implements the avro.AvroRecord interface. 
+func (DummyEvent) AvroRecord() avrotypegen.RecordInfo { + return avrotypegen.RecordInfo{ + Schema: `{"fields":[{"name":"IntField","type":"int"},{"name":"DoubleField","type":"double"},{"name":"StringField","type":"string"},{"name":"BoolField","type":"boolean"},{"name":"BytesField","type":"bytes"}],"name":"DummyEvent","type":"record"}`, + Required: []bool{ + 0: true, + 1: true, + 2: true, + 3: true, + 4: true, + }, + } +} diff --git a/formatter.go b/formatter.go index 29e4a18..62948ef 100644 --- a/formatter.go +++ b/formatter.go @@ -99,10 +99,6 @@ func newAvroSchemaRegistryFormatter(srConfig SchemaRegistryConfig) (avroSchemaRe } func (f avroSchemaRegistryFormatter) Marshall(topic string, target any, avroSchema string) ([]byte, error) { - //info := schemaregistry.SchemaInfo{ - // Schema: "", - //} - //f.ser.GetID(topic, nil, nil) if avroSchema != "" { info := schemaregistry.SchemaInfo{ Schema: avroSchema, @@ -111,31 +107,8 @@ func (f avroSchemaRegistryFormatter) Marshall(topic string, target any, avroSche if err != nil { return nil, fmt.Errorf("failed to get avro schema by id: %w", err) } - //subject, err := f.ser.SubjectNameStrategy(topic, f.ser.SerdeType, info) - //if err != nil { - // return nil, err - //} - //target, err = f.ser.ExecuteRules(subject, topic, schemaregistry.Write, nil, &info, target) - //if err != nil { - // return nil, err - //} f.f.SchemaID = id return f.f.Marshall(target) - - //schema, err := avro.Parse(avroSchema) - //if err != nil { - // return nil, fmt.Errorf("failed to parse avro schema: %w", err) - //} - //msgBytes, err := avro.Marshal(schema, target) - //if err != nil { - // return nil, err - //} - //payload, err := f.ser.WriteBytes(id, msgBytes) - //if err != nil { - // return nil, err - //} - //return payload, nil - } value, err := f.ser.Serialize(topic, target) if err != nil { diff --git a/test/dummy_event_1.avsc b/test/dummy_event_1.avsc new file mode 100644 index 0000000..03ea6e7 --- /dev/null +++ b/test/dummy_event_1.avsc @@ -0,0 +1,11 @@ +{ + "type": "record", + "name": "DummyEvent", + "fields": [ + {"name": "IntField", "type": "int"}, + {"name": "DoubleField", "type": "double"}, + {"name": "StringField", "type": "string"}, + {"name": "BoolField", "type": "boolean"}, + {"name": "BytesField", "type": "bytes"} + ] +} \ No newline at end of file diff --git a/test/dummy_event_2.avsc b/test/dummy_event_2.avsc new file mode 100644 index 0000000..8355d50 --- /dev/null +++ b/test/dummy_event_2.avsc @@ -0,0 +1,12 @@ +{ + "type": "record", + "name": "DummyEvent", + "fields": [ + {"name": "IntField", "type": "int"}, + {"name": "DoubleField", "type": "double"}, + {"name": "StringField", "type": "string"}, + {"name": "BoolField", "type": "boolean"}, + {"name": "BytesField", "type": "bytes"}, + {"name": "NewFieldWithDefault", "type": ["null", "string"], "default": null } + ] +} \ No newline at end of file diff --git a/test/heetch1/dummy_event_1_gen.go b/test/heetch1/dummy_event_1_gen.go new file mode 100644 index 0000000..c741965 --- /dev/null +++ b/test/heetch1/dummy_event_1_gen.go @@ -0,0 +1,29 @@ +// Code generated by avrogen. DO NOT EDIT. + +package heetch1 + +import ( + "github.com/heetch/avro/avrotypegen" +) + +type DummyEvent struct { + IntField int + DoubleField float64 + StringField string + BoolField bool + BytesField []byte +} + +// AvroRecord implements the avro.AvroRecord interface. 
+func (DummyEvent) AvroRecord() avrotypegen.RecordInfo { + return avrotypegen.RecordInfo{ + Schema: `{"fields":[{"name":"IntField","type":"int"},{"name":"DoubleField","type":"double"},{"name":"StringField","type":"string"},{"name":"BoolField","type":"boolean"},{"name":"BytesField","type":"bytes"}],"name":"DummyEvent","type":"record"}`, + Required: []bool{ + 0: true, + 1: true, + 2: true, + 3: true, + 4: true, + }, + } +} diff --git a/test/heetch2/dummy_event_2_gen.go b/test/heetch2/dummy_event_2_gen.go new file mode 100644 index 0000000..509bad9 --- /dev/null +++ b/test/heetch2/dummy_event_2_gen.go @@ -0,0 +1,30 @@ +// Code generated by avrogen. DO NOT EDIT. + +package heetch2 + +import ( + "github.com/heetch/avro/avrotypegen" +) + +type DummyEvent struct { + IntField int + DoubleField float64 + StringField string + BoolField bool + BytesField []byte + NewFieldWithDefault *string +} + +// AvroRecord implements the avro.AvroRecord interface. +func (DummyEvent) AvroRecord() avrotypegen.RecordInfo { + return avrotypegen.RecordInfo{ + Schema: `{"fields":[{"name":"IntField","type":"int"},{"name":"DoubleField","type":"double"},{"name":"StringField","type":"string"},{"name":"BoolField","type":"boolean"},{"name":"BytesField","type":"bytes"},{"default":null,"name":"NewFieldWithDefault","type":["null","string"]}],"name":"DummyEvent","type":"record"}`, + Required: []bool{ + 0: true, + 1: true, + 2: true, + 3: true, + 4: true, + }, + } +} diff --git a/test/integration_test.go b/test/integration_test.go index 83bc3e9..8301a67 100644 --- a/test/integration_test.go +++ b/test/integration_test.go @@ -1,6 +1,3 @@ -//go:build integration -// +build integration - package test import ( @@ -35,6 +32,8 @@ import ( // 1. Restart a consumer (being sure to reuse the same consumer group from before) // 1. Read another message. Assert its the second written message (first was already read and committed) func TestKafkaClientsCanReadOwnWritesAndBehaveProperlyAfterRestart(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() topic := "integration-test-topic-2" + uuid.NewString() bootstrapServer := getBootstrap() @@ -166,6 +165,8 @@ func TestKafkaClientsCanReadOwnWritesAndBehaveProperlyAfterRestart(t *testing.T) // This is in response to a noted issue where rebalance was prone to replayed messages. // There are multiple versions of the tests which vary the processing duration func Test_RebalanceDoesntCauseDuplicateMessages(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + type testCase struct { name string processingDuration time.Duration @@ -350,6 +351,8 @@ func Test_RebalanceDoesntCauseDuplicateMessages(t *testing.T) { // when a consumer joins and starts consuming messages and later when another consumer joins // then there are no duplicate messages processed. func Test_WithMultipleTopics_RebalanceDoesntCauseDuplicateMessages(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + type testCase struct { name string processingDuration time.Duration @@ -523,6 +526,8 @@ func Test_WithMultipleTopics_RebalanceDoesntCauseDuplicateMessages(t *testing.T) // The consumer's processing times are set to a range as opposed to a specific duration. 
This allows lookahead processing (where messages // of higher offsets are processed and completed, potentially, before lower offsets func Test_WithConcurrentProcessing_RebalanceDoesntCauseDuplicateMessages(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + type testCase struct { name string processingDurationMinMillis int @@ -677,6 +682,8 @@ func Test_WithConcurrentProcessing_RebalanceDoesntCauseDuplicateMessages(t *test // The rebalances are handled during the Poll call under the hood (which is only called while a KReader is in the attempt of Reading. // So as we simulate two members of a group we'll need to keep calling from both consumers so the rebalance eventually occurs func Test_AssignmentsReflectsConsumerAssignments(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() groupID := uuid.NewString() @@ -793,6 +800,8 @@ func Test_AssignmentsReflectsConsumerAssignments(t *testing.T) { // when the second consumer joins and causes a rebalance // then the first isn't infinitely blocked in its rebalance func Test_UnfinishableWorkDoesntBlockWorkIndefinitely(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() groupID := uuid.NewString() @@ -887,6 +896,8 @@ func Test_UnfinishableWorkDoesntBlockWorkIndefinitely(t *testing.T) { // when processing that message errors and a deadletter is configured // then the errored message will be written to the dlt func Test_KafkaClientsCanWriteToTheirDeadLetterTopic(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + bootstrapServer := getBootstrap() topic := "topic1" + uuid.NewString() dlt := "deadlettertopic1" + uuid.NewString() @@ -979,6 +990,8 @@ func Test_KafkaClientsCanWriteToTheirDeadLetterTopic(t *testing.T) { } func Test_WorkDelay_GuaranteesProcessingDelayedAtLeastSpecifiedDelayDurationFromWhenMessageWritten(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() groupID := uuid.NewString() @@ -1085,6 +1098,8 @@ func Test_WorkDelay_GuaranteesProcessingDelayedAtLeastSpecifiedDelayDurationFrom // 2. It also asserts that the time between the first and last message is very short. // This is expected in a backlog situation, since the worker will delay once, and with monotonically increasing timestamps won't have to delay again func Test_WorkDelay_DoesntHaveDurationStackEffect(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + ctx := context.Background() groupID := uuid.NewString() diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go new file mode 100644 index 0000000..4a34aa0 --- /dev/null +++ b/test/schema_registry_test.go @@ -0,0 +1,173 @@ +package test + +import ( + "context" + _ "embed" + "fmt" + "math/rand" + "os" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "github.com/zillow/zkafka" + "github.com/zillow/zkafka/test/heetch1" + "github.com/zillow/zkafka/test/heetch2" +) + +//go:embed dummy_event_1.avsc +var dummyEventSchema1 string + +//go:embed dummy_event_2.avsc +var dummyEventSchema2 string + +const enableSchemaRegistryTest = "ENABLE_SCHEMA_REGISTRY_TESTS" +const enableKafkaBrokerTest = "ENABLE_KAFKA_BROKER_TESTS" + +// Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom tests +// the `avro_confluent` formatter which uses schema registry. +// +// two schemas exists, which are backwards compatible with one another. 
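+// (Schema 2 extends schema 1 with an optional field, NewFieldWithDefault, declared as ["null", "string"] with a null default; see test/dummy_event_1.avsc and test/dummy_event_2.avsc.)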
+// The default behavior of the confluent-kafka-go doesn't handle this well, since the new field in schema2 +// +// which has a default value and is nullable has the default value omitted in schema registration and is therefore +// +// found to be incompatible. Auto registration isn't typically used in production environments, +// but this behavior is still problematic because the implicit schema resolution is used to communicate with schema registry +// and determine the appropriate schemaID to embed. The omitted default means the correct schema isn't found. +// +// The two message sucesfully writing means the two schemas are registered. +// We then test we can use the confluent deserializer to decode the messages. For both schema1 and schema2. +// This confirms that backwards/forward compatible evolution is possible and old schemas can still read messages from new. +func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableSchemaRegistryTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroConfluentFmt, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: struct{ AutoRegisterSchemas bool }{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + evt1 := heetch1.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer.Write(ctx, evt1, zkafka.WithAvroSchema(dummyEventSchema1)) + require.NoError(t, err) + + evt2 := heetch2.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewFieldWithDefault: ptr(uuid.NewString()), + } + _, err = writer.Write(ctx, evt2, zkafka.WithAvroSchema(dummyEventSchema2)) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroConfluentFmt, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := heetch1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1) + + receivedEvt2Schema1 := heetch1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := heetch1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: 
evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1) + + receivedEvt2Schema2 := heetch2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2) +} + +func checkShouldSkipTest(t *testing.T, flags ...string) { + t.Helper() + for _, flag := range flags { + if os.Getenv(flag) != "true" { + t.Skipf("Skipping test. To execute. Set envvar '%s' to true", flag) + } + } +} + +func readMessages(reader zkafka.Reader, count int) (<-chan *zkafka.Message, error) { + + responses := make(chan *zkafka.Message, count) + + seen := 0 + for { + func() { + ctx := context.Background() + rmsg, err := reader.Read(ctx) + defer func() { + if rmsg == nil { + return + } + rmsg.DoneWithContext(ctx) + }() + if err != nil || rmsg == nil { + return + } + responses <- rmsg + seen++ + }() + if seen >= count { + close(responses) + return responses, nil + } + } +} From e1eec3b629d051248de6de062d7aabde6d52df17 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Mon, 9 Sep 2024 10:08:28 -0700 Subject: [PATCH 03/34] Updated --- config.go | 17 +++++++++++------ test/schema_registry_test.go | 2 +- writer.go | 4 +++- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/config.go b/config.go index 49e471e..090b5da 100644 --- a/config.go +++ b/config.go @@ -212,12 +212,17 @@ func (p ProducerTopicConfig) GetSchemaID() int { } type SchemaRegistryConfig struct { - URL string - Serialization struct { - AutoRegisterSchemas bool - } - Deserialization struct { - } + URL string + Serialization SerializationConfig + Deserialization DeserializationConfig +} + +type SerializationConfig struct { + AutoRegisterSchemas bool + Schema string +} + +type DeserializationConfig struct { } func getDefaultConsumerTopicConfig(topicConfig *ConsumerTopicConfig) error { diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go index 4a34aa0..1655e0c 100644 --- a/test/schema_registry_test.go +++ b/test/schema_registry_test.go @@ -61,7 +61,7 @@ func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFro Formatter: zkafka.AvroConfluentFmt, SchemaRegistry: zkafka.SchemaRegistryConfig{ URL: "http://localhost:8081", - Serialization: struct{ AutoRegisterSchemas bool }{ + Serialization: zkafka.SerializationConfig{ AutoRegisterSchemas: true, }, }, diff --git a/writer.go b/writer.go index 75d9d93..d6feb43 100644 --- a/writer.go +++ b/writer.go @@ -194,7 +194,9 @@ func (w *KWriter) startSpan(ctx context.Context, msg *kafka.Message) spanWrapper } func (w *KWriter) write(ctx context.Context, msg keyValuePair, opts ...WriteOption) (Response, error) { - s := writeSettings{} + s := writeSettings{ + avroSchema: w.topicConfig.SchemaRegistry.Serialization.Schema, + } for _, opt := range opts { opt2, ok := opt.(WriteOption2) if !ok { From 164f5ee8978b518e4a779420fbf6cf405fc9c1ed Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Mon, 9 Sep 2024 10:12:11 -0700 Subject: [PATCH 04/34] Updated --- test/schema_registry_test.go | 23 +++++++++++++++++++---- writer.go | 25 ------------------------- 2 files changed, 19 insertions(+), 29 deletions(-) diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go index 1655e0c..d3c1783 100644 --- a/test/schema_registry_test.go +++ b/test/schema_registry_test.go @@ -36,7 +36,7 @@ const enableKafkaBrokerTest = "ENABLE_KAFKA_BROKER_TESTS" // but this behavior is still problematic because the implicit schema resolution is used to communicate with schema 
registry // and determine the appropriate schemaID to embed. The omitted default means the correct schema isn't found. // -// The two message sucesfully writing means the two schemas are registered. +// The two message successfully writing means the two schemas are registered. // We then test we can use the confluent deserializer to decode the messages. For both schema1 and schema2. // This confirms that backwards/forward compatible evolution is possible and old schemas can still read messages from new. func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { @@ -55,7 +55,7 @@ func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFro defer func() { require.NoError(t, client.Close()) }() t.Log("Created writer with auto registered schemas") - writer, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), Topic: topic, Formatter: zkafka.AvroConfluentFmt, @@ -63,6 +63,21 @@ func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFro URL: "http://localhost:8081", Serialization: zkafka.SerializationConfig{ AutoRegisterSchemas: true, + Schema: dummyEventSchema1, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroConfluentFmt, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema2, }, }, }) @@ -76,7 +91,7 @@ func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFro BytesField: []byte(uuid.NewString()), } // write msg1, and msg2 - _, err = writer.Write(ctx, evt1, zkafka.WithAvroSchema(dummyEventSchema1)) + _, err = writer1.Write(ctx, evt1) require.NoError(t, err) evt2 := heetch2.DummyEvent{ @@ -87,7 +102,7 @@ func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFro BytesField: []byte(uuid.NewString()), NewFieldWithDefault: ptr(uuid.NewString()), } - _, err = writer.Write(ctx, evt2, zkafka.WithAvroSchema(dummyEventSchema2)) + _, err = writer2.Write(ctx, evt2) require.NoError(t, err) consumerTopicConfig := zkafka.ConsumerTopicConfig{ diff --git a/writer.go b/writer.go index d6feb43..2f9becc 100644 --- a/writer.go +++ b/writer.go @@ -197,13 +197,6 @@ func (w *KWriter) write(ctx context.Context, msg keyValuePair, opts ...WriteOpti s := writeSettings{ avroSchema: w.topicConfig.SchemaRegistry.Serialization.Schema, } - for _, opt := range opts { - opt2, ok := opt.(WriteOption2) - if !ok { - continue - } - opt2.applySettings(&s) - } value, err := w.marshall(ctx, msg.value, s.avroSchema) if err != nil { return Response{}, err @@ -251,15 +244,9 @@ type writeSettings struct { avroSchema string } -type WriteOption2 interface { - applySettings(s *writeSettings) -} - // WithHeaders allows for the specification of headers. Specified headers will override collisions. 
func WithHeaders(headers map[string]string) WriteOption { return withHeaderOption{headers: headers} } -func WithAvroSchema(schema string) WriteOption { return withAvroSchema{schema: schema} } - type withHeaderOption struct { headers map[string]string } @@ -282,15 +269,3 @@ func (o withHeaderOption) apply(s *kafka.Message) { updateHeaders(k, v) } } - -var _ WriteOption2 = (*withAvroSchema)(nil) - -type withAvroSchema struct { - schema string -} - -func (o withAvroSchema) apply(s *kafka.Message) {} - -func (o withAvroSchema) applySettings(s *writeSettings) { - s.avroSchema = o.schema -} From bb5ab136430384c7e817be788dd2857cd1f54070 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Mon, 9 Sep 2024 10:45:14 -0700 Subject: [PATCH 05/34] Updated --- formatter.go | 26 +++++------ test/schema_registry_test.go | 83 ++++++++++++++++++++++++++++++++++++ 2 files changed, 96 insertions(+), 13 deletions(-) diff --git a/formatter.go b/formatter.go index 62948ef..dd421f0 100644 --- a/formatter.go +++ b/formatter.go @@ -99,22 +99,22 @@ func newAvroSchemaRegistryFormatter(srConfig SchemaRegistryConfig) (avroSchemaRe } func (f avroSchemaRegistryFormatter) Marshall(topic string, target any, avroSchema string) ([]byte, error) { - if avroSchema != "" { - info := schemaregistry.SchemaInfo{ - Schema: avroSchema, - } - id, err := f.ser.GetID(topic, nil, &info) - if err != nil { - return nil, fmt.Errorf("failed to get avro schema by id: %w", err) - } - f.f.SchemaID = id - return f.f.Marshall(target) + if avroSchema == "" { + return nil, errors.New("avro schema is required for schema registry formatter") + } + info := schemaregistry.SchemaInfo{ + Schema: avroSchema, + } + id, err := f.ser.GetID(topic, nil, &info) + if err != nil { + return nil, fmt.Errorf("failed to get avro schema by id for topic %s: %w", topic, err) } - value, err := f.ser.Serialize(topic, target) + f.f.SchemaID = id + data, err := f.f.Marshall(target) if err != nil { - return nil, fmt.Errorf("failed to serialize confluent schema registry avro type: %w", err) + return nil, fmt.Errorf("failed to marshal avro schema for topic %s: %w", topic, err) } - return value, nil + return data, nil } func (f avroSchemaRegistryFormatter) Unmarshal(topic string, value []byte, target any) error { diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go index d3c1783..1c49a22 100644 --- a/test/schema_registry_test.go +++ b/test/schema_registry_test.go @@ -150,6 +150,89 @@ func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFro assertEqual(t, evt2, receivedEvt2Schema2) } +func Test_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing.T) { + checkShouldSkipTest(t, enableSchemaRegistryTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroConfluentFmt, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: false, + Schema: dummyEventSchema1, + }, + }, + }) + 
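+	// Note: constructing the writer succeeds even though the schema isn't registered;
+	// with AutoRegisterSchemas=false the failure only surfaces on Write, when the
+	// serializer's GetID lookup can't resolve the unregistered schema.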
require.NoError(t, err)
+
+	evt1 := heetch1.DummyEvent{
+		IntField:    int(rand.Int31()),
+		DoubleField: rand.Float64(),
+		StringField: uuid.NewString(),
+		BoolField:   true,
+		BytesField:  []byte(uuid.NewString()),
+	}
+	// attempt the write; it should fail, since the unregistered schema can't be resolved
+	_, err = writer1.Write(ctx, evt1)
+	require.ErrorContains(t, err, "failed to get avro schema by id")
+}
+
+// It's possible to not specify a schema for your producer.
+// In this case, the underlying lib would use implicit schema resolution,
+// but the schema registry formatter requires an explicit schema and returns an error instead.
+func Test_AutoRegisterSchemas_RequiresSchemaSpecification(t *testing.T) {
+	checkShouldSkipTest(t, enableSchemaRegistryTest)
+
+	ctx := context.Background()
+	topic := "integration-test-topic-2" + uuid.NewString()
+	bootstrapServer := getBootstrap()
+
+	createTopic(t, bootstrapServer, topic, 1)
+	t.Logf("Created topic: %s", topic)
+
+	client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{}))
+	defer func() { require.NoError(t, client.Close()) }()
+
+	t.Log("Created writer with auto registered schemas")
+	writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{
+		ClientID:  fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()),
+		Topic:     topic,
+		Formatter: zkafka.AvroConfluentFmt,
+		SchemaRegistry: zkafka.SchemaRegistryConfig{
+			URL: "http://localhost:8081",
+			Serialization: zkafka.SerializationConfig{
+				AutoRegisterSchemas: true,
+				// don't specify a schema; the empty value forces implicit handling
+				Schema: "",
+			},
+		},
+	})
+	require.NoError(t, err)
+
+	evt1 := heetch1.DummyEvent{
+		IntField:    int(rand.Int31()),
+		DoubleField: rand.Float64(),
+		StringField: uuid.NewString(),
+		BoolField:   true,
+		BytesField:  []byte(uuid.NewString()),
+	}
+	// attempt the write; it should fail, since no schema was provided
+	_, err = writer1.Write(ctx, evt1)
+	require.ErrorContains(t, err, "avro schema is required for schema registry formatter")
+}
+
 func checkShouldSkipTest(t *testing.T, flags ...string) {
From 27cb9bbc1963795d13ea005a478452d8214de1f9 Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Mon, 9 Sep 2024 21:22:10 -0700
Subject: [PATCH 06/34] Updated

---
 client.go                    | 27 ++++++++++-
 client_test.go               |  8 ++--
 formatter.go                 | 87 ++++++++++++++++++++++++++----------
 reader.go                    | 14 +++++-
 reader_test.go               | 44 +++++++++++-------
 test/schema_registry_test.go | 68 ++++++++++++++++++++++++++++
 writer.go                    | 15 ++++++-
 writer_test.go               |  2 +-
 8 files changed, 214 insertions(+), 51 deletions(-)

diff --git a/client.go b/client.go
index d9c5c12..3a5bb9c 100644
--- a/client.go
+++ b/client.go
@@ -4,8 +4,11 @@ package zkafka
 
 import (
 	"context"
+	"errors"
+	"fmt"
 	"sync"
 
+	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
 	"go.opentelemetry.io/otel/propagation"
 	"go.opentelemetry.io/otel/trace"
 )
@@ -34,6 +37,9 @@ type Client struct {
 	tp     trace.TracerProvider
 	p      propagation.TextMapPropagator
 
+	mmu   sync.Mutex
+	srCls map[string]schemaregistry.Client
+
 	// confluent dependencies
 	producerProvider confluentProducerProvider
 	consumerProvider confluentConsumerProvider
@@ -45,6 +51,7 @@ func NewClient(conf Config, opts ...Option) *Client {
 		conf:    conf,
 		readers: make(map[string]*KReader),
 		writers: make(map[string]*KWriter),
+		srCls:   make(map[string]schemaregistry.Client),
 		logger:  NoopLogger{},
 
 		producerProvider: defaultConfluentProducerProvider{}.NewProducer,
@@ -77,7 +84,7 @@ func (c *Client) Reader(_ context.Context, topicConfig ConsumerTopicConfig, opts
 		return r, nil
 	}
-	reader, err := newReader(c.conf, topicConfig, c.consumerProvider, c.logger, c.groupPrefix)
+	reader, err := newReader(c.conf, topicConfig, c.consumerProvider,
c.logger, c.groupPrefix, c.schemaCl) if err != nil { return nil, err } @@ -112,7 +119,7 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig, opts if exist && !w.isClosed { return w, nil } - writer, err := newWriter(c.conf, topicConfig, c.producerProvider) + writer, err := newWriter(c.conf, topicConfig, c.producerProvider, c.schemaCl) if err != nil { return nil, err } @@ -150,6 +157,22 @@ func (c *Client) Close() error { return err } +func (c *Client) schemaCl(srConfig SchemaRegistryConfig) (schemaregistry.Client, error) { + url := srConfig.URL + if url == "" { + return nil, errors.New("no schema registry url provided") + } + if srCl, ok := c.srCls[url]; ok { + return srCl, nil + } + client, err := schemaregistry.NewClient(schemaregistry.NewConfig(url)) + if err != nil { + return nil, fmt.Errorf("failed to create schema registry client: %w", err) + } + c.srCls[url] = client + return client, nil +} + func getTracer(tp trace.TracerProvider) trace.Tracer { if tp == nil { return nil diff --git a/client_test.go b/client_test.go index 4f3e75e..c2fa738 100644 --- a/client_test.go +++ b/client_test.go @@ -462,11 +462,11 @@ func TestClient_Close(t *testing.T) { }.NewConsumer r1, err := newReader(Config{}, ConsumerTopicConfig{ Formatter: zfmt.StringFmt, - }, m, &NoopLogger{}, "") + }, m, &NoopLogger{}, "", nil) require.NoError(t, err) r2, err := newReader(Config{}, ConsumerTopicConfig{ Formatter: zfmt.StringFmt, - }, m, &NoopLogger{}, "") + }, m, &NoopLogger{}, "", nil) require.NoError(t, err) tests := []struct { name string @@ -588,7 +588,7 @@ func Test_getFormatter_Consumer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got, _, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID, SchemaRegistryConfig{}) + got, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID) if tt.wantErr { require.Error(t, err) } else { @@ -625,7 +625,7 @@ func Test_getFormatter_Producer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got, _, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID, SchemaRegistryConfig{}) + got, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID) if tt.wantErr { require.Error(t, err) } else { diff --git a/formatter.go b/formatter.go index dd421f0..9bc2ab2 100644 --- a/formatter.go +++ b/formatter.go @@ -29,19 +29,30 @@ type confluentFormatter interface { Unmarshal(topic string, b []byte, v any) error } -func getFormatter(formatter zfmt.FormatterType, schemaID int, srCfg SchemaRegistryConfig) (Formatter, confluentFormatter, error) { +func getFormatter2(formatter zfmt.FormatterType, srCfg SchemaRegistryConfig, getSR srFunc) (confluentFormatter, error) { switch formatter { case AvroConfluentFmt: - cf, err := newAvroSchemaRegistryFormatter(srCfg) - return nil, cf, err + cl, err := getSR(srCfg) + if err != nil { + return nil, err + } + cf, err := newAvroSchemaRegistryFormatter(cl, srCfg) + return cf, err + default: + return nil, fmt.Errorf("unsupported formatter %s", formatter) + } +} + +func getFormatter(formatter zfmt.FormatterType, schemaID int) (Formatter, error) { + switch formatter { case CustomFmt: - return &errFormatter{}, nil, nil + return &errFormatter{}, nil default: f, err := zfmt.GetFormatter(formatter, schemaID) if err != nil { - return nil, nil, fmt.Errorf("unsupported formatter %s", formatter) + return nil, fmt.Errorf("unsupported 
formatter %s", formatter) } - return f, nil, nil + return f, nil } } @@ -62,24 +73,32 @@ func (f errFormatter) Unmarshal(_ []byte, _ any) error { var _ confluentFormatter = (*avroSchemaRegistryFormatter)(nil) type avroSchemaRegistryFormatter struct { - deser *avrov2.Deserializer - ser *avrov2.Serializer - f zfmt.SchematizedAvroFormatter + schemaRegistryCl schemaRegistryCl + //deser *avrov2.Deserializer + //ser *avrov2.Serializer + f zfmt.SchematizedAvroFormatter } -func newAvroSchemaRegistryFormatter(srConfig SchemaRegistryConfig) (avroSchemaRegistryFormatter, error) { +// func newAvroConfig(srConfig SchemaRegistryConfig) (*avro.SerializerConfig, *avro.DeserializerConfig, error) { +// url := srConfig.URL +// if url == "" { +// return nil, nil, errors.New("no schema registry url provided") +// } +// deserConfig := avrov2.NewDeserializerConfig() +// +// serConfig := avrov2.NewSerializerConfig() +// serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas +// serConfig.NormalizeSchemas = true +// +// } +func newAvroSchemaRegistryFormatter(cl schemaregistry.Client, srConfig SchemaRegistryConfig) (avroSchemaRegistryFormatter, error) { url := srConfig.URL if url == "" { return avroSchemaRegistryFormatter{}, errors.New("no schema registry url provided") - - } - client, err := schemaregistry.NewClient(schemaregistry.NewConfig(url)) - if err != nil { - return avroSchemaRegistryFormatter{}, fmt.Errorf("failed to create schema registry client: %w", err) } deserConfig := avrov2.NewDeserializerConfig() - deser, err := avrov2.NewDeserializer(client, serde.ValueSerde, deserConfig) + deser, err := avrov2.NewDeserializer(cl, serde.ValueSerde, deserConfig) if err != nil { return avroSchemaRegistryFormatter{}, fmt.Errorf("failed to create deserializer: %w", err) } @@ -88,13 +107,16 @@ func newAvroSchemaRegistryFormatter(srConfig SchemaRegistryConfig) (avroSchemaRe serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas serConfig.NormalizeSchemas = true - ser, err := avrov2.NewSerializer(client, serde.ValueSerde, serConfig) + ser, err := avrov2.NewSerializer(cl, serde.ValueSerde, serConfig) if err != nil { return avroSchemaRegistryFormatter{}, fmt.Errorf("failed to create serializer: %w", err) } - return avroSchemaRegistryFormatter{ - deser: deser, + shimcl := shim{ ser: ser, + deser: deser, + } + return avroSchemaRegistryFormatter{ + schemaRegistryCl: shimcl, }, nil } @@ -102,10 +124,7 @@ func (f avroSchemaRegistryFormatter) Marshall(topic string, target any, avroSche if avroSchema == "" { return nil, errors.New("avro schema is required for schema registry formatter") } - info := schemaregistry.SchemaInfo{ - Schema: avroSchema, - } - id, err := f.ser.GetID(topic, nil, &info) + id, err := f.schemaRegistryCl.GetID(topic, avroSchema) if err != nil { return nil, fmt.Errorf("failed to get avro schema by id for topic %s: %w", topic, err) } @@ -118,9 +137,29 @@ func (f avroSchemaRegistryFormatter) Marshall(topic string, target any, avroSche } func (f avroSchemaRegistryFormatter) Unmarshal(topic string, value []byte, target any) error { - err := f.deser.DeserializeInto(topic, value, &target) + err := f.schemaRegistryCl.DeserializeInto(topic, value, &target) if err != nil { return fmt.Errorf("failed to deserialize to confluent schema registry avro type: %w", err) } return nil } + +type schemaRegistryCl interface { + GetID(topic string, avroSchema string) (int, error) + DeserializeInto(topic string, value []byte, target any) error +} + +var _ schemaRegistryCl = (*shim)(nil) + +type 
shim struct { + ser *avrov2.Serializer + deser *avrov2.Deserializer +} + +func (s shim) GetID(topic string, avroSchema string) (int, error) { + return s.ser.GetID(topic, nil, &schemaregistry.SchemaInfo{Schema: avroSchema}) +} + +func (s shim) DeserializeInto(topic string, value []byte, target any) error { + return s.deser.DeserializeInto(topic, value, target) +} diff --git a/reader.go b/reader.go index c1f9ced..1e98110 100644 --- a/reader.go +++ b/reader.go @@ -11,6 +11,7 @@ import ( "time" "github.com/confluentinc/confluent-kafka-go/v2/kafka" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" ) //// go:generate mockgen -destination=./mocks/mock_metrics.go -source=reader.go @@ -52,15 +53,24 @@ type KReader struct { tCommitMgr *topicCommitMgr } +type srFunc func(_ SchemaRegistryConfig) (schemaregistry.Client, error) + // newReader makes a new reader based on the configurations -func newReader(conf Config, topicConfig ConsumerTopicConfig, provider confluentConsumerProvider, logger Logger, prefix string) (*KReader, error) { +func newReader(conf Config, topicConfig ConsumerTopicConfig, provider confluentConsumerProvider, logger Logger, prefix string, getSR srFunc) (*KReader, error) { confluentConfig := makeConsumerConfig(conf, topicConfig, prefix) consumer, err := provider(confluentConfig) if err != nil { return nil, err } - formatter, cFormatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry) + var formatter Formatter + var cFormatter confluentFormatter + switch topicConfig.Formatter { + case AvroConfluentFmt: + cFormatter, err = getFormatter2(topicConfig.Formatter, topicConfig.SchemaRegistry, getSR) + default: + formatter, err = getFormatter(topicConfig.Formatter, topicConfig.SchemaID) + } if err != nil { return nil, err } diff --git a/reader_test.go b/reader_test.go index b4d536c..f38e01f 100644 --- a/reader_test.go +++ b/reader_test.go @@ -31,7 +31,7 @@ func TestReader_Read_NilReturn(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) got, err := r.Read(context.TODO()) require.NoError(t, err) @@ -59,7 +59,8 @@ func TestReader_Read(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + require.NoError(t, err) got, err := r.Read(context.TODO()) require.NoError(t, err) @@ -95,7 +96,8 @@ func TestReader_Read_Error(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + require.NoError(t, err) got, err := r.Read(context.TODO()) require.Error(t, err) @@ -127,7 +129,8 @@ func TestReader_Read_TimeoutError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + require.NoError(t, err) got, err := r.Read(context.TODO()) require.NoError(t, err, "expect no error to be returned on timeout") @@ -146,9 +149,10 @@ func TestReader_Read_SubscriberError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, 
"") + r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + require.NoError(t, err) - _, err := r.Read(context.TODO()) + _, err = r.Read(context.TODO()) require.Error(t, err, "expect an error to bubble up on Read because of subscribe error") } @@ -166,10 +170,10 @@ func TestReader_Read_CloseError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") - - err := r.Close() + r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + require.NoError(t, err) + err = r.Close() require.Error(t, err) } @@ -187,9 +191,10 @@ func TestReader_ReadWhenConnectionIsClosed(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "") + r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + require.NoError(t, err) - err := r.Close() + err = r.Close() require.NoError(t, err) _, err = r.Read(context.TODO()) require.Error(t, err, "KReader.Read() message should return error due to connection lost") @@ -247,7 +252,7 @@ func Test_newReader(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - _, err := newReader(tt.args.conf, tt.args.topicConfig, tt.args.consumeProvider, &NoopLogger{}, "") + _, err := newReader(tt.args.conf, tt.args.topicConfig, tt.args.consumeProvider, &NoopLogger{}, "", nil) if tt.wantErr { require.Error(t, err) } else { @@ -279,7 +284,9 @@ func Test_ProcessMessage(t *testing.T) { m := mockConfluentConsumerProvider{ c: mock_confluent.NewMockKafkaConsumer(ctrl), }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") + r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + require.NoError(t, err) + got := r.mapMessage(context.Background(), dupMessage) require.Equal(t, got.Partition, dupMessage.TopicPartition.Partition) @@ -310,7 +317,9 @@ func Test_ProcessMultipleMessagesFromDifferentTopics_UpdatesInternalStateProperl m := mockConfluentConsumerProvider{ c: mock_confluent.NewMockKafkaConsumer(ctrl), }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") + r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + require.NoError(t, err) + for _, msg := range msgs { got := r.mapMessage(context.Background(), msg) require.Equal(t, got.Partition, msg.TopicPartition.Partition) @@ -350,7 +359,8 @@ func Test_ProcessMessage_StoreOffsetError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") + r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + require.NoError(t, err) mgr := newTopicCommitMgr() cmgr := mgr.get(*dupMessage.TopicPartition.Topic) @@ -397,7 +407,9 @@ func Test_ProcessMessage_SetError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &l, "") + r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + require.NoError(t, err) + mgr := newTopicCommitMgr() cmgr := mgr.get(*dupMessage.TopicPartition.Topic) cmgr.PushInWork(dupMessage.TopicPartition) diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go index 1c49a22..2bf6c44 100644 --- a/test/schema_registry_test.go +++ b/test/schema_registry_test.go @@ -233,6 +233,74 @@ func Test_AutoRegisterSchemas_RequiresSchemaSpecification(t *testing.T) { require.ErrorContains(t, err, "avro schema is required for schema registry formatter") } +func TestXXXX(t *testing.T) { + ctx := 
context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroConfluentFmt, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema1, + }, + }, + }) + require.NoError(t, err) + + evt1 := heetch1.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, evt1) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroConfluentFmt, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 1) + require.NoError(t, err) + + msg1 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := heetch1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1) +} + func checkShouldSkipTest(t *testing.T, flags ...string) { t.Helper() for _, flag := range flags { diff --git a/writer.go b/writer.go index 2f9becc..2350bda 100644 --- a/writer.go +++ b/writer.go @@ -60,13 +60,24 @@ type keyValuePair struct { value any } -func newWriter(conf Config, topicConfig ProducerTopicConfig, producer confluentProducerProvider) (*KWriter, error) { +func newWriter(conf Config, topicConfig ProducerTopicConfig, producer confluentProducerProvider, getSR srFunc) (*KWriter, error) { confluentConfig := makeProducerConfig(conf, topicConfig) p, err := producer(confluentConfig) if err != nil { return nil, err } - formatter, cFormatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry) + //formatter, cFormatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry) + //if err != nil { + // return nil, err + //} + var formatter Formatter + var cFormatter confluentFormatter + switch topicConfig.Formatter { + case AvroConfluentFmt: + cFormatter, err = getFormatter2(topicConfig.Formatter, topicConfig.SchemaRegistry, getSR) + default: + formatter, err = getFormatter(topicConfig.Formatter, topicConfig.SchemaID) + } if err != nil { return nil, err } diff --git a/writer_test.go b/writer_test.go index 0fc56b7..b826889 100644 --- a/writer_test.go +++ b/writer_test.go @@ -542,7 +542,7 @@ func Test_newWriter(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { recoverThenFail(t) - w, err := newWriter(tt.args.conf, tt.args.topicConfig, tt.args.producerP) + w, err := newWriter(tt.args.conf, tt.args.topicConfig, tt.args.producerP, 
nil)
 			if tt.wantErr {
 				require.Error(t, err, "expected error for newWriter()")
 			} else {

From c8f682077a6dbdd8ee13b413fbad16fa28c8ca8a Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Mon, 9 Sep 2024 21:56:54 -0700
Subject: [PATCH 07/34] Updated schema registry tests; renamed srFunc to
 srProvider

---
 formatter.go                 |   2 +-
 reader.go                    |   4 +-
 test/schema_registry_test.go | 141 +++++++++++++++++++++++------------
 writer.go                    |   8 +-
 4 files changed, 97 insertions(+), 58 deletions(-)

diff --git a/formatter.go b/formatter.go
index 9bc2ab2..6138367 100644
--- a/formatter.go
+++ b/formatter.go
@@ -29,7 +29,7 @@ type confluentFormatter interface {
 	Unmarshal(topic string, b []byte, v any) error
 }
 
-func getFormatter2(formatter zfmt.FormatterType, srCfg SchemaRegistryConfig, getSR srFunc) (confluentFormatter, error) {
+func getFormatter2(formatter zfmt.FormatterType, srCfg SchemaRegistryConfig, getSR srProvider) (confluentFormatter, error) {
 	switch formatter {
 	case AvroConfluentFmt:
 		cl, err := getSR(srCfg)
diff --git a/reader.go b/reader.go
index 1e98110..109b532 100644
--- a/reader.go
+++ b/reader.go
@@ -53,10 +53,10 @@ type KReader struct {
 	tCommitMgr *topicCommitMgr
 }
 
-type srFunc func(_ SchemaRegistryConfig) (schemaregistry.Client, error)
+type srProvider func(_ SchemaRegistryConfig) (schemaregistry.Client, error)
 
 // newReader makes a new reader based on the configurations
-func newReader(conf Config, topicConfig ConsumerTopicConfig, provider confluentConsumerProvider, logger Logger, prefix string, getSR srFunc) (*KReader, error) {
+func newReader(conf Config, topicConfig ConsumerTopicConfig, provider confluentConsumerProvider, logger Logger, prefix string, getSR srProvider) (*KReader, error) {
 	confluentConfig := makeConsumerConfig(conf, topicConfig, prefix)
 	consumer, err := provider(confluentConfig)
 	if err != nil {
diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go
index 2bf6c44..3b9853b 100644
--- a/test/schema_registry_test.go
+++ b/test/schema_registry_test.go
@@ -39,8 +39,8 @@ const enableKafkaBrokerTest = "ENABLE_KAFKA_BROKER_TESTS"
 // Both messages writing successfully means the two schemas are registered.
 // We then test that we can use the confluent deserializer to decode the messages, for both schema1 and schema2.
 // This confirms that backwards/forward compatible evolution is possible and old schemas can still read messages from new.
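+// Note: the "Real" variant below runs against a live schema registry rather than
+// the mock client, so it is gated behind both test-enable flags.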
-func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { - checkShouldSkipTest(t, enableSchemaRegistryTest) +func Test_SchemaRegistryReal_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) ctx := context.Background() topic := "integration-test-topic-2" + uuid.NewString() @@ -150,8 +150,8 @@ func Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFro assertEqual(t, evt2, receivedEvt2Schema2) } -func Test_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing.T) { - checkShouldSkipTest(t, enableSchemaRegistryTest) +func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) ctx := context.Background() topic := "integration-test-topic-2" + uuid.NewString() @@ -160,6 +160,8 @@ func Test_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing.T) { createTopic(t, bootstrapServer, topic, 1) t.Logf("Created topic: %s", topic) + groupID := uuid.NewString() + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) defer func() { require.NoError(t, client.Close()) }() @@ -169,15 +171,29 @@ func Test_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing.T) { Topic: topic, Formatter: zkafka.AvroConfluentFmt, SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "http://localhost:8081", + URL: "mock://", Serialization: zkafka.SerializationConfig{ - AutoRegisterSchemas: false, + AutoRegisterSchemas: true, Schema: dummyEventSchema1, }, }, }) require.NoError(t, err) + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroConfluentFmt, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema2, + }, + }, + }) + require.NoError(t, err) + evt1 := heetch1.DummyEvent{ IntField: int(rand.Int31()), DoubleField: rand.Float64(), @@ -187,13 +203,66 @@ func Test_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing.T) { } // write msg1, and msg2 _, err = writer1.Write(ctx, evt1) - require.ErrorContains(t, err, "failed to get avro schema by id") + require.NoError(t, err) + + evt2 := heetch2.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewFieldWithDefault: ptr(uuid.NewString()), + } + _, err = writer2.Write(ctx, evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroConfluentFmt, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := heetch1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1) + + 
receivedEvt2Schema1 := heetch1.DummyEvent{}
+	require.NoError(t, msg2.Decode(&receivedEvt2Schema1))
+	expectedEvt2 := heetch1.DummyEvent{
+		IntField:    evt2.IntField,
+		DoubleField: evt2.DoubleField,
+		StringField: evt2.StringField,
+		BoolField:   evt2.BoolField,
+		BytesField:  evt2.BytesField,
+	}
+	assertEqual(t, expectedEvt2, receivedEvt2Schema1)
+
+	receivedEvt2Schema2 := heetch2.DummyEvent{}
+	require.NoError(t, msg2.Decode(&receivedEvt2Schema2))
+	assertEqual(t, evt2, receivedEvt2Schema2)
 }
 
-// Its possible not specify a schema for your producer.
-// In this case, the underlying lib does
-func Test_AutoRegisterSchemas_RequiresSchemaSpecification(t *testing.T) {
-	checkShouldSkipTest(t, enableSchemaRegistryTest)
+func Test_SchemaRegistry_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing.T) {
+	checkShouldSkipTest(t, enableKafkaBrokerTest)
 
 	ctx := context.Background()
 	topic := "integration-test-topic-2" + uuid.NewString()
@@ -211,11 +280,10 @@ func Test_AutoRegisterSchemas_RequiresSchemaSpecification(t *testing.T) {
 			Topic:     topic,
 			Formatter: zkafka.AvroConfluentFmt,
 			SchemaRegistry: zkafka.SchemaRegistryConfig{
-				URL: "http://localhost:8081",
+				URL: "mock://",
 				Serialization: zkafka.SerializationConfig{
-					AutoRegisterSchemas: true,
-					// don't specify schema uses implicit handling
-					Schema:              "",
+					AutoRegisterSchemas: false,
+					Schema:              dummyEventSchema1,
 				},
 			},
 		})
@@ -230,10 +298,14 @@ func Test_AutoRegisterSchemas_RequiresSchemaSpecification(t *testing.T) {
 	}
 	// write msg1, and msg2
 	_, err = writer1.Write(ctx, evt1)
-	require.ErrorContains(t, err, "avro schema is required for schema registry formatter")
+	require.ErrorContains(t, err, "failed to get avro schema by id")
 }
 
-func TestXXXX(t *testing.T) {
+// It's possible to not specify a schema for your producer.
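+// Without one, the formatter has no schema to register or look up with the registry.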
+// The write then fails, since the schema registry formatter requires an explicit schema.
+func Test_SchemaRegistry_AutoRegisterSchemas_RequiresSchemaSpecification(t *testing.T) {
+	checkShouldSkipTest(t, enableKafkaBrokerTest)
+
 	ctx := context.Background()
 	topic := "integration-test-topic-2" + uuid.NewString()
 	bootstrapServer := getBootstrap()
@@ -241,8 +313,6 @@ func TestXXXX(t *testing.T) {
 	createTopic(t, bootstrapServer, topic, 1)
 	t.Logf("Created topic: %s", topic)
 
-	groupID := uuid.NewString()
-
 	client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{}))
 	defer func() { require.NoError(t, client.Close()) }()
 
@@ -255,7 +325,8 @@ func TestXXXX(t *testing.T) {
 			URL: "mock://",
 			Serialization: zkafka.SerializationConfig{
 				AutoRegisterSchemas: true,
-				Schema:              dummyEventSchema1,
+				// no schema specified; exercises the formatter's missing-schema handling
+				Schema: "",
 			},
 		},
 	})
@@ -270,35 +341,7 @@ func TestXXXX(t *testing.T) {
 	}
 	// write msg1, and msg2
 	_, err = writer1.Write(ctx, evt1)
-	require.NoError(t, err)
-
-	consumerTopicConfig := zkafka.ConsumerTopicConfig{
-		ClientID:  fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()),
-		Topic:     topic,
-		Formatter: zkafka.AvroConfluentFmt,
-		SchemaRegistry: zkafka.SchemaRegistryConfig{
-			URL: "mock://",
-		},
-		GroupID: groupID,
-		AdditionalProps: map[string]any{
-			"auto.offset.reset": "earliest",
-		},
-	}
-	reader, err := client.Reader(ctx, consumerTopicConfig)
-	require.NoError(t, err)
-
-	t.Log("Begin reading messages")
-	results, err := readMessages(reader, 1)
-	require.NoError(t, err)
-
-	msg1 := <-results
-	t.Log("Close reader")
-
-	require.NoError(t, reader.Close())
-
-	receivedEvt1 := heetch1.DummyEvent{}
-	require.NoError(t, msg1.Decode(&receivedEvt1))
-	assertEqual(t, evt1, receivedEvt1)
+	require.ErrorContains(t, err, "avro schema is required for schema registry formatter")
 }
 
 func checkShouldSkipTest(t *testing.T, flags ...string) {
diff --git a/writer.go b/writer.go
index 2350bda..2518b4c 100644
--- a/writer.go
+++ b/writer.go
@@ -60,21 +60,17 @@ type keyValuePair struct {
 	value any
 }
 
-func newWriter(conf Config, topicConfig ProducerTopicConfig, producer confluentProducerProvider, getSR srFunc) (*KWriter, error) {
+func newWriter(conf Config, topicConfig ProducerTopicConfig, producer confluentProducerProvider, srProvider srProvider) (*KWriter, error) {
 	confluentConfig := makeProducerConfig(conf, topicConfig)
 	p, err := producer(confluentConfig)
 	if err != nil {
 		return nil, err
 	}
-	//formatter, cFormatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry)
-	//if err != nil {
-	//	return nil, err
-	//}
 	var formatter Formatter
 	var cFormatter confluentFormatter
 	switch topicConfig.Formatter {
 	case AvroConfluentFmt:
-		cFormatter, err = getFormatter2(topicConfig.Formatter, topicConfig.SchemaRegistry, getSR)
+		cFormatter, err = getFormatter2(topicConfig.Formatter, topicConfig.SchemaRegistry, srProvider)
 	default:
 		formatter, err = getFormatter(topicConfig.Formatter, topicConfig.SchemaID)
 	}

From ce08024c3f2e7ea51d119dedd9e24750eeaa4e45 Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Tue, 10 Sep 2024 06:54:40 -0700
Subject: [PATCH 08/34] Updated formatter: unified zfmt and confluent
 formatters behind one interface

---
 client_test.go    | 16 +++++------
 formatter.go      | 71 ++++++++++++++++++++++++++++++++---------------
 formatter_test.go |  4 +--
 message.go        | 39 +++++++++++++-------------
 message_test.go   |  8 +++---
 reader.go         | 32 ++++++++------------
 testhelper.go     |  2 +-
 writer.go         | 28 +++++++------------
 writer_test.go    | 26 +++++++++++--------
 9 files changed, 117 insertions(+), 109 
deletions(-) diff --git a/client_test.go b/client_test.go index c2fa738..f7ee0de 100644 --- a/client_test.go +++ b/client_test.go @@ -203,7 +203,7 @@ func TestClient_Reader(t *testing.T) { MaxPollIntervalMillis: ptr(61000), }, logger: NoopLogger{}, - fmtter: &zfmt.AvroFormatter{}, + fmtter: f1{&zfmt.AvroFormatter{}}, }, wantErr: false, }, @@ -235,7 +235,7 @@ func TestClient_Reader(t *testing.T) { MaxPollIntervalMillis: ptr(21000), }, logger: NoopLogger{}, - fmtter: &zfmt.AvroFormatter{}, + fmtter: f1{&zfmt.AvroFormatter{}}, }, wantErr: false, }, @@ -364,7 +364,7 @@ func TestClient_Writer(t *testing.T) { logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, - fmtter: &zfmt.ProtobufRawFormatter{}, + fmtter: f1{&zfmt.ProtobufRawFormatter{}}, }, wantErr: false, }, @@ -393,7 +393,7 @@ func TestClient_Writer(t *testing.T) { logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, - fmtter: &zfmt.ProtobufRawFormatter{}, + fmtter: f1{&zfmt.ProtobufRawFormatter{}}, }, wantErr: false, }, @@ -588,12 +588,12 @@ func Test_getFormatter_Consumer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID) + got, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID, SchemaRegistryConfig{}, nil) if tt.wantErr { require.Error(t, err) } else { require.NoError(t, err) - require.Equal(t, tt.want, got) + require.Equal(t, f1{tt.want}, got) } }) } @@ -625,12 +625,12 @@ func Test_getFormatter_Producer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID) + got, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID, SchemaRegistryConfig{}, nil) if tt.wantErr { require.Error(t, err) } else { require.NoError(t, err) - require.Equal(t, tt.want, got) + require.Equal(t, f1{tt.want}, got) } }) } diff --git a/formatter.go b/formatter.go index 6138367..34bcef3 100644 --- a/formatter.go +++ b/formatter.go @@ -24,12 +24,44 @@ type Formatter interface { Unmarshal(b []byte, v any) error } -type confluentFormatter interface { - Marshall(topic string, v any, schema string) ([]byte, error) - Unmarshal(topic string, b []byte, v any) error +//type confluentFormatter interface { +// Marshall(topic string, v any, schema string) ([]byte, error) +// Unmarshal(topic string, b []byte, v any) error +//} + +type marshReq struct { + topic string + subject any + schema string } -func getFormatter2(formatter zfmt.FormatterType, srCfg SchemaRegistryConfig, getSR srProvider) (confluentFormatter, error) { +type unmarshReq struct { + topic string + data []byte + target any +} + +type ultimateFormatter interface { + Marshall(req marshReq) ([]byte, error) + Unmarshal(req unmarshReq) error +} + +var _ ultimateFormatter = (*avroSchemaRegistryFormatter)(nil) +var _ ultimateFormatter = (*f1)(nil) + +type f1 struct { + F zfmt.Formatter +} + +func (f f1) Marshall(req marshReq) ([]byte, error) { + return f.F.Marshall(req.subject) +} + +func (f f1) Unmarshal(req unmarshReq) error { + return f.F.Unmarshal(req.data, req.target) +} + +func getFormatter(formatter zfmt.FormatterType, schemaID int, srCfg SchemaRegistryConfig, getSR srProvider) (ultimateFormatter, error) { switch formatter { case AvroConfluentFmt: cl, err := getSR(srCfg) @@ -38,13 +70,6 @@ func 
getFormatter2(formatter zfmt.FormatterType, srCfg SchemaRegistryConfig, get } cf, err := newAvroSchemaRegistryFormatter(cl, srCfg) return cf, err - default: - return nil, fmt.Errorf("unsupported formatter %s", formatter) - } -} - -func getFormatter(formatter zfmt.FormatterType, schemaID int) (Formatter, error) { - switch formatter { case CustomFmt: return &errFormatter{}, nil default: @@ -52,7 +77,7 @@ func getFormatter(formatter zfmt.FormatterType, schemaID int) (Formatter, error) if err != nil { return nil, fmt.Errorf("unsupported formatter %s", formatter) } - return f, nil + return f1{F: f}, nil } } @@ -61,16 +86,16 @@ func getFormatter(formatter zfmt.FormatterType, schemaID int) (Formatter, error) type errFormatter struct{} // Marshall returns error with reminder -func (f errFormatter) Marshall(_ any) ([]byte, error) { +func (f errFormatter) Marshall(req marshReq) ([]byte, error) { return nil, errMissingFmtter } // Unmarshal returns error with reminder -func (f errFormatter) Unmarshal(_ []byte, _ any) error { +func (f errFormatter) Unmarshal(req unmarshReq) error { return errMissingFmtter } -var _ confluentFormatter = (*avroSchemaRegistryFormatter)(nil) +//var _ confluentFormatter = (*avroSchemaRegistryFormatter)(nil) type avroSchemaRegistryFormatter struct { schemaRegistryCl schemaRegistryCl @@ -120,24 +145,24 @@ func newAvroSchemaRegistryFormatter(cl schemaregistry.Client, srConfig SchemaReg }, nil } -func (f avroSchemaRegistryFormatter) Marshall(topic string, target any, avroSchema string) ([]byte, error) { - if avroSchema == "" { +func (f avroSchemaRegistryFormatter) Marshall(req marshReq) ([]byte, error) { + if req.schema == "" { return nil, errors.New("avro schema is required for schema registry formatter") } - id, err := f.schemaRegistryCl.GetID(topic, avroSchema) + id, err := f.schemaRegistryCl.GetID(req.topic, req.schema) if err != nil { - return nil, fmt.Errorf("failed to get avro schema by id for topic %s: %w", topic, err) + return nil, fmt.Errorf("failed to get avro schema by id for topic %s: %w", req.topic, err) } f.f.SchemaID = id - data, err := f.f.Marshall(target) + data, err := f.f.Marshall(req.subject) if err != nil { - return nil, fmt.Errorf("failed to marshal avro schema for topic %s: %w", topic, err) + return nil, fmt.Errorf("failed to marshal avro schema for topic %s: %w", req.topic, err) } return data, nil } -func (f avroSchemaRegistryFormatter) Unmarshal(topic string, value []byte, target any) error { - err := f.schemaRegistryCl.DeserializeInto(topic, value, &target) +func (f avroSchemaRegistryFormatter) Unmarshal(req unmarshReq) error { + err := f.schemaRegistryCl.DeserializeInto(req.topic, req.data, &req.target) if err != nil { return fmt.Errorf("failed to deserialize to confluent schema registry avro type: %w", err) } diff --git a/formatter_test.go b/formatter_test.go index a842129..65825a5 100644 --- a/formatter_test.go +++ b/formatter_test.go @@ -9,10 +9,10 @@ import ( func TestNoopFormatter_Marshall_Unmarshal(t *testing.T) { defer recoverThenFail(t) fmtter := errFormatter{} - _, err := fmtter.Marshall("anything") + _, err := fmtter.Marshall(marshReq{subject: "anything"}) require.ErrorIs(t, err, errMissingFmtter) var someInt int32 - err = fmtter.Unmarshal([]byte("test"), &someInt) + err = fmtter.Unmarshal(unmarshReq{data: []byte("test"), target: &someInt}) require.ErrorIs(t, err, errMissingFmtter) } diff --git a/message.go b/message.go index df8c512..20d7bcd 100644 --- a/message.go +++ b/message.go @@ -7,7 +7,6 @@ import ( "time" 
"github.com/confluentinc/confluent-kafka-go/v2/kafka" - "github.com/zillow/zfmt" ) // Message is a container for kafka message @@ -15,19 +14,18 @@ type Message struct { Key string // There's a difference between a nil key and an empty key. A nil key gets assigned a topic partition by kafka via round-robin. // An empty key is treated as a key with a value of "" and is assigned to a topic partition via the hash of the key (so will consistently go to the same key) - isKeyNil bool - Headers map[string][]byte - Offset int64 - Partition int32 - Topic string - GroupID string - TimeStamp time.Time - value []byte - topicPartition kafka.TopicPartition - fmt zfmt.Formatter - confluentFormatter confluentFormatter - doneFunc func(ctx context.Context) - doneOnce sync.Once + isKeyNil bool + Headers map[string][]byte + Offset int64 + Partition int32 + Topic string + GroupID string + TimeStamp time.Time + value []byte + topicPartition kafka.TopicPartition + fmt ultimateFormatter + doneFunc func(ctx context.Context) + doneOnce sync.Once } // DoneWithContext is used to alert that message processing has completed. @@ -58,13 +56,14 @@ func (m *Message) Decode(v any) error { } func (m *Message) unmarshall(target any) error { - if m.fmt != nil { - return m.fmt.Unmarshal(m.value, target) + if m.fmt == nil { + return errors.New("formatter or confluent formatter is not supplied to decode kafka message") } - if m.confluentFormatter != nil { - return m.confluentFormatter.Unmarshal(m.Topic, m.value, target) - } - return errors.New("formatter or confluent formatter is not supplied to decode kafka message") + return m.fmt.Unmarshal(unmarshReq{ + topic: m.Topic, + data: m.value, + target: target, + }) } // Value returns a copy of the current value byte array. Useful for debugging diff --git a/message_test.go b/message_test.go index dd8d1c9..5e57f52 100644 --- a/message_test.go +++ b/message_test.go @@ -104,7 +104,7 @@ func TestMessage_Headers(t *testing.T) { func TestMessage_Decode(t *testing.T) { type fields struct { value []byte - fmt zfmt.Formatter + fmt ultimateFormatter } type args struct { v any @@ -133,7 +133,7 @@ func TestMessage_Decode(t *testing.T) { name: "valid message, formatter, empty input => error", fields: fields{ value: []byte("test"), - fmt: &zfmt.StringFormatter{}, + fmt: f1{&zfmt.StringFormatter{}}, }, args: args{}, wantErr: true, @@ -142,7 +142,7 @@ func TestMessage_Decode(t *testing.T) { name: "valid message, formatter, valid input => no error", fields: fields{ value: []byte("test"), - fmt: &zfmt.StringFormatter{}, + fmt: f1{&zfmt.StringFormatter{}}, }, args: args{ v: &bytes.Buffer{}, @@ -194,7 +194,7 @@ func TestMessage_Done(t *testing.T) { Key: tt.fields.Key, Headers: tt.fields.Headers, value: tt.fields.value, - fmt: tt.fields.fmt, + fmt: f1{F: tt.fields.fmt}, doneFunc: func(ctx context.Context) { isCalled = true }, diff --git a/reader.go b/reader.go index 109b532..8579516 100644 --- a/reader.go +++ b/reader.go @@ -44,8 +44,7 @@ type KReader struct { topicConfig ConsumerTopicConfig isClosed bool - fmtter Formatter - confluentFormatter confluentFormatter + fmtter ultimateFormatter logger Logger lifecycle LifecycleHooks @@ -56,32 +55,24 @@ type KReader struct { type srProvider func(_ SchemaRegistryConfig) (schemaregistry.Client, error) // newReader makes a new reader based on the configurations -func newReader(conf Config, topicConfig ConsumerTopicConfig, provider confluentConsumerProvider, logger Logger, prefix string, getSR srProvider) (*KReader, error) { +func newReader(conf Config, topicConfig 
ConsumerTopicConfig, provider confluentConsumerProvider, logger Logger, prefix string, srProvider srProvider) (*KReader, error) { confluentConfig := makeConsumerConfig(conf, topicConfig, prefix) consumer, err := provider(confluentConfig) if err != nil { return nil, err } - var formatter Formatter - var cFormatter confluentFormatter - switch topicConfig.Formatter { - case AvroConfluentFmt: - cFormatter, err = getFormatter2(topicConfig.Formatter, topicConfig.SchemaRegistry, getSR) - default: - formatter, err = getFormatter(topicConfig.Formatter, topicConfig.SchemaID) - } + formatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry, srProvider) if err != nil { return nil, err } return &KReader{ - consumer: consumer, - fmtter: formatter, - confluentFormatter: cFormatter, - topicConfig: topicConfig, - logger: logger, - tCommitMgr: newTopicCommitMgr(), + consumer: consumer, + fmtter: formatter, + topicConfig: topicConfig, + logger: logger, + tCommitMgr: newTopicCommitMgr(), }, nil } @@ -211,9 +202,8 @@ func (r *KReader) mapMessage(_ context.Context, msg kafka.Message) *Message { r.logger.Errorw(ctx, "Error storing offsets", "topicName", topicName, "groupID", r.topicConfig.GroupID, "partition", partition, "offset", offset, "error", err) } }, - value: msg.Value, - fmt: r.fmtter, - confluentFormatter: r.confluentFormatter, + value: msg.Value, + fmt: r.fmtter, } } @@ -306,7 +296,7 @@ type ReaderOption func(*KReader) func RFormatterOption(fmtter Formatter) ReaderOption { return func(r *KReader) { if fmtter != nil { - r.fmtter = fmtter + r.fmtter = f1{F: fmtter} } } } diff --git a/testhelper.go b/testhelper.go index 59c57eb..988d92b 100644 --- a/testhelper.go +++ b/testhelper.go @@ -84,7 +84,7 @@ func GetMsgFromFake(msg *FakeMessage) *Message { Partition: msg.Partition, Offset: kafka.Offset(msg.Offset), }, - fmt: msg.Fmt, + fmt: f1{F: msg.Fmt}, doneFunc: doneFunc, doneOnce: sync.Once{}, } diff --git a/writer.go b/writer.go index 2518b4c..63101b6 100644 --- a/writer.go +++ b/writer.go @@ -46,8 +46,7 @@ type KWriter struct { mu sync.Mutex producer KafkaProducer topicConfig ProducerTopicConfig - fmtter Formatter - cFormatter confluentFormatter + fmtter ultimateFormatter logger Logger tracer trace.Tracer p propagation.TextMapPropagator @@ -66,14 +65,7 @@ func newWriter(conf Config, topicConfig ProducerTopicConfig, producer confluentP if err != nil { return nil, err } - var formatter Formatter - var cFormatter confluentFormatter - switch topicConfig.Formatter { - case AvroConfluentFmt: - cFormatter, err = getFormatter2(topicConfig.Formatter, topicConfig.SchemaRegistry, srProvider) - default: - formatter, err = getFormatter(topicConfig.Formatter, topicConfig.SchemaID) - } + formatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry, srProvider) if err != nil { return nil, err } @@ -81,7 +73,6 @@ func newWriter(conf Config, topicConfig ProducerTopicConfig, producer confluentP return &KWriter{ producer: p, fmtter: formatter, - cFormatter: cFormatter, topicConfig: topicConfig, logger: NoopLogger{}, }, nil @@ -213,13 +204,14 @@ func (w *KWriter) write(ctx context.Context, msg keyValuePair, opts ...WriteOpti } func (w *KWriter) marshall(_ context.Context, value any, schema string) ([]byte, error) { - if w.fmtter != nil { - return w.fmtter.Marshall(value) - } - if w.cFormatter != nil { - return w.cFormatter.Marshall(w.topicConfig.Topic, value, schema) + if w.fmtter == nil { + return nil, errors.New("formatter or confluent 
formatter is not supplied to produce kafka message") } - return nil, errors.New("formatter or confluent formatter is not supplied to produce kafka message") + return w.fmtter.Marshall(marshReq{ + topic: w.topicConfig.Topic, + subject: value, + schema: schema, + }) } // Close terminates the writer gracefully and mark it as closed @@ -237,7 +229,7 @@ type WriterOption func(*KWriter) func WFormatterOption(fmtter Formatter) WriterOption { return func(w *KWriter) { if fmtter != nil { - w.fmtter = fmtter + w.fmtter = f1{F: fmtter} } } } diff --git a/writer_test.go b/writer_test.go index b826889..3d95651 100644 --- a/writer_test.go +++ b/writer_test.go @@ -36,7 +36,7 @@ func TestWriter_Write(t *testing.T) { type fields struct { Mutex *sync.Mutex Producer KafkaProducer - fmt zfmt.Formatter + fmt ultimateFormatter } type args struct { ctx context.Context @@ -50,8 +50,10 @@ func TestWriter_Write(t *testing.T) { wantErr bool }{ { - name: "formatter check at minimum", - fields: fields{}, + name: "formatter check at minimum", + fields: fields{ + fmt: nil, + }, args: args{ctx: context.TODO(), value: "1"}, want: Response{Partition: 0, Offset: 0}, wantErr: true, @@ -59,7 +61,7 @@ func TestWriter_Write(t *testing.T) { { name: "has formatter and producer", fields: fields{ - fmt: &zfmt.StringFormatter{}, + fmt: f1{&zfmt.StringFormatter{}}, Producer: p, }, args: args{ctx: context.TODO(), value: "1"}, @@ -68,7 +70,7 @@ func TestWriter_Write(t *testing.T) { { name: "has formatter, producer, incompatible message type", fields: fields{ - fmt: &zfmt.StringFormatter{}, + fmt: f1{&zfmt.StringFormatter{}}, Producer: p, }, args: args{ctx: context.TODO(), value: 5}, @@ -162,7 +164,7 @@ func TestWriter_WriteKey(t *testing.T) { w := &KWriter{ producer: tt.fields.Producer, topicConfig: tt.fields.conf, - fmtter: tt.fields.fmt, + fmtter: f1{tt.fields.fmt}, isClosed: tt.fields.isClosed, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), @@ -200,7 +202,7 @@ func TestWriter_WriteKeyReturnsImmediateError(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{}, isClosed: false, - fmtter: &zfmt.JSONFormatter{}, + fmtter: f1{&zfmt.JSONFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -238,7 +240,7 @@ func TestWriter_WritesMetrics(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: &zfmt.StringFormatter{}, + fmtter: f1{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -302,7 +304,7 @@ func TestWriter_WriteSpecialCase(t *testing.T) { t.Run(tt.name, func(t *testing.T) { w := &KWriter{ producer: tt.fields.Producer, - fmtter: tt.fields.fmt, + fmtter: f1{tt.fields.fmt}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -346,7 +348,7 @@ func TestWriter_PreWriteLifecycleHookCanAugmentHeaders(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: &zfmt.StringFormatter{}, + fmtter: f1{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -375,7 +377,7 @@ func TestWriter_WithHeadersWriteOptionCanAugmentHeaders(t *testing.T) { wr := &KWriter{ producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, - fmtter: &zfmt.StringFormatter{}, + fmtter: f1{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: 
propagation.TraceContext{}, @@ -433,7 +435,7 @@ func TestWriter_PreWriteLifecycleHookErrorDoesntHaltProcessing(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: &zfmt.StringFormatter{}, + fmtter: f1{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, From 7f1865286c36e43021ea4ecc921f29ae56ed266f Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Tue, 10 Sep 2024 06:58:59 -0700 Subject: [PATCH 09/34] Renamed formatter stuff --- client_test.go | 12 ++++++------ formatter.go | 32 ++++++++++++++++---------------- formatter_test.go | 4 ++-- message.go | 4 ++-- message_test.go | 8 ++++---- reader.go | 4 ++-- testhelper.go | 2 +- writer.go | 6 +++--- writer_test.go | 20 ++++++++++---------- 9 files changed, 46 insertions(+), 46 deletions(-) diff --git a/client_test.go b/client_test.go index f7ee0de..c204f04 100644 --- a/client_test.go +++ b/client_test.go @@ -203,7 +203,7 @@ func TestClient_Reader(t *testing.T) { MaxPollIntervalMillis: ptr(61000), }, logger: NoopLogger{}, - fmtter: f1{&zfmt.AvroFormatter{}}, + fmtter: zfmtShim{&zfmt.AvroFormatter{}}, }, wantErr: false, }, @@ -235,7 +235,7 @@ func TestClient_Reader(t *testing.T) { MaxPollIntervalMillis: ptr(21000), }, logger: NoopLogger{}, - fmtter: f1{&zfmt.AvroFormatter{}}, + fmtter: zfmtShim{&zfmt.AvroFormatter{}}, }, wantErr: false, }, @@ -364,7 +364,7 @@ func TestClient_Writer(t *testing.T) { logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, - fmtter: f1{&zfmt.ProtobufRawFormatter{}}, + fmtter: zfmtShim{&zfmt.ProtobufRawFormatter{}}, }, wantErr: false, }, @@ -393,7 +393,7 @@ func TestClient_Writer(t *testing.T) { logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, - fmtter: f1{&zfmt.ProtobufRawFormatter{}}, + fmtter: zfmtShim{&zfmt.ProtobufRawFormatter{}}, }, wantErr: false, }, @@ -593,7 +593,7 @@ func Test_getFormatter_Consumer(t *testing.T) { require.Error(t, err) } else { require.NoError(t, err) - require.Equal(t, f1{tt.want}, got) + require.Equal(t, zfmtShim{tt.want}, got) } }) } @@ -630,7 +630,7 @@ func Test_getFormatter_Producer(t *testing.T) { require.Error(t, err) } else { require.NoError(t, err) - require.Equal(t, f1{tt.want}, got) + require.Equal(t, zfmtShim{tt.want}, got) } }) } diff --git a/formatter.go b/formatter.go index 34bcef3..f74fb0b 100644 --- a/formatter.go +++ b/formatter.go @@ -25,8 +25,8 @@ type Formatter interface { } //type confluentFormatter interface { -// Marshall(topic string, v any, schema string) ([]byte, error) -// Unmarshal(topic string, b []byte, v any) error +// marshall(topic string, v any, schema string) ([]byte, error) +// unmarshal(topic string, b []byte, v any) error //} type marshReq struct { @@ -41,27 +41,27 @@ type unmarshReq struct { target any } -type ultimateFormatter interface { - Marshall(req marshReq) ([]byte, error) - Unmarshal(req unmarshReq) error +type kFormatter interface { + marshall(req marshReq) ([]byte, error) + unmarshal(req unmarshReq) error } -var _ ultimateFormatter = (*avroSchemaRegistryFormatter)(nil) -var _ ultimateFormatter = (*f1)(nil) +var _ kFormatter = (*avroSchemaRegistryFormatter)(nil) +var _ kFormatter = (*zfmtShim)(nil) -type f1 struct { +type zfmtShim struct { F zfmt.Formatter } -func (f f1) Marshall(req marshReq) ([]byte, error) { +func (f zfmtShim) marshall(req marshReq) ([]byte, error) { return f.F.Marshall(req.subject) } -func (f f1) Unmarshal(req 
unmarshReq) error { +func (f zfmtShim) unmarshal(req unmarshReq) error { return f.F.Unmarshal(req.data, req.target) } -func getFormatter(formatter zfmt.FormatterType, schemaID int, srCfg SchemaRegistryConfig, getSR srProvider) (ultimateFormatter, error) { +func getFormatter(formatter zfmt.FormatterType, schemaID int, srCfg SchemaRegistryConfig, getSR srProvider) (kFormatter, error) { switch formatter { case AvroConfluentFmt: cl, err := getSR(srCfg) @@ -77,7 +77,7 @@ func getFormatter(formatter zfmt.FormatterType, schemaID int, srCfg SchemaRegist if err != nil { return nil, fmt.Errorf("unsupported formatter %s", formatter) } - return f1{F: f}, nil + return zfmtShim{F: f}, nil } } @@ -86,12 +86,12 @@ func getFormatter(formatter zfmt.FormatterType, schemaID int, srCfg SchemaRegist type errFormatter struct{} // Marshall returns error with reminder -func (f errFormatter) Marshall(req marshReq) ([]byte, error) { +func (f errFormatter) marshall(req marshReq) ([]byte, error) { return nil, errMissingFmtter } // Unmarshal returns error with reminder -func (f errFormatter) Unmarshal(req unmarshReq) error { +func (f errFormatter) unmarshal(req unmarshReq) error { return errMissingFmtter } @@ -145,7 +145,7 @@ func newAvroSchemaRegistryFormatter(cl schemaregistry.Client, srConfig SchemaReg }, nil } -func (f avroSchemaRegistryFormatter) Marshall(req marshReq) ([]byte, error) { +func (f avroSchemaRegistryFormatter) marshall(req marshReq) ([]byte, error) { if req.schema == "" { return nil, errors.New("avro schema is required for schema registry formatter") } @@ -161,7 +161,7 @@ func (f avroSchemaRegistryFormatter) Marshall(req marshReq) ([]byte, error) { return data, nil } -func (f avroSchemaRegistryFormatter) Unmarshal(req unmarshReq) error { +func (f avroSchemaRegistryFormatter) unmarshal(req unmarshReq) error { err := f.schemaRegistryCl.DeserializeInto(req.topic, req.data, &req.target) if err != nil { return fmt.Errorf("failed to deserialize to confluent schema registry avro type: %w", err) diff --git a/formatter_test.go b/formatter_test.go index 65825a5..d36bd1a 100644 --- a/formatter_test.go +++ b/formatter_test.go @@ -9,10 +9,10 @@ import ( func TestNoopFormatter_Marshall_Unmarshal(t *testing.T) { defer recoverThenFail(t) fmtter := errFormatter{} - _, err := fmtter.Marshall(marshReq{subject: "anything"}) + _, err := fmtter.marshall(marshReq{subject: "anything"}) require.ErrorIs(t, err, errMissingFmtter) var someInt int32 - err = fmtter.Unmarshal(unmarshReq{data: []byte("test"), target: &someInt}) + err = fmtter.unmarshal(unmarshReq{data: []byte("test"), target: &someInt}) require.ErrorIs(t, err, errMissingFmtter) } diff --git a/message.go b/message.go index 20d7bcd..5e2850c 100644 --- a/message.go +++ b/message.go @@ -23,7 +23,7 @@ type Message struct { TimeStamp time.Time value []byte topicPartition kafka.TopicPartition - fmt ultimateFormatter + fmt kFormatter doneFunc func(ctx context.Context) doneOnce sync.Once } @@ -59,7 +59,7 @@ func (m *Message) unmarshall(target any) error { if m.fmt == nil { return errors.New("formatter or confluent formatter is not supplied to decode kafka message") } - return m.fmt.Unmarshal(unmarshReq{ + return m.fmt.unmarshal(unmarshReq{ topic: m.Topic, data: m.value, target: target, diff --git a/message_test.go b/message_test.go index 5e57f52..660c818 100644 --- a/message_test.go +++ b/message_test.go @@ -104,7 +104,7 @@ func TestMessage_Headers(t *testing.T) { func TestMessage_Decode(t *testing.T) { type fields struct { value []byte - fmt ultimateFormatter + fmt 
kFormatter } type args struct { v any @@ -133,7 +133,7 @@ func TestMessage_Decode(t *testing.T) { name: "valid message, formatter, empty input => error", fields: fields{ value: []byte("test"), - fmt: f1{&zfmt.StringFormatter{}}, + fmt: zfmtShim{&zfmt.StringFormatter{}}, }, args: args{}, wantErr: true, @@ -142,7 +142,7 @@ func TestMessage_Decode(t *testing.T) { name: "valid message, formatter, valid input => no error", fields: fields{ value: []byte("test"), - fmt: f1{&zfmt.StringFormatter{}}, + fmt: zfmtShim{&zfmt.StringFormatter{}}, }, args: args{ v: &bytes.Buffer{}, @@ -194,7 +194,7 @@ func TestMessage_Done(t *testing.T) { Key: tt.fields.Key, Headers: tt.fields.Headers, value: tt.fields.value, - fmt: f1{F: tt.fields.fmt}, + fmt: zfmtShim{F: tt.fields.fmt}, doneFunc: func(ctx context.Context) { isCalled = true }, diff --git a/reader.go b/reader.go index 8579516..bb08d5c 100644 --- a/reader.go +++ b/reader.go @@ -44,7 +44,7 @@ type KReader struct { topicConfig ConsumerTopicConfig isClosed bool - fmtter ultimateFormatter + fmtter kFormatter logger Logger lifecycle LifecycleHooks @@ -296,7 +296,7 @@ type ReaderOption func(*KReader) func RFormatterOption(fmtter Formatter) ReaderOption { return func(r *KReader) { if fmtter != nil { - r.fmtter = f1{F: fmtter} + r.fmtter = zfmtShim{F: fmtter} } } } diff --git a/testhelper.go b/testhelper.go index 988d92b..d17e228 100644 --- a/testhelper.go +++ b/testhelper.go @@ -84,7 +84,7 @@ func GetMsgFromFake(msg *FakeMessage) *Message { Partition: msg.Partition, Offset: kafka.Offset(msg.Offset), }, - fmt: f1{F: msg.Fmt}, + fmt: zfmtShim{F: msg.Fmt}, doneFunc: doneFunc, doneOnce: sync.Once{}, } diff --git a/writer.go b/writer.go index 63101b6..01a19ec 100644 --- a/writer.go +++ b/writer.go @@ -46,7 +46,7 @@ type KWriter struct { mu sync.Mutex producer KafkaProducer topicConfig ProducerTopicConfig - fmtter ultimateFormatter + fmtter kFormatter logger Logger tracer trace.Tracer p propagation.TextMapPropagator @@ -207,7 +207,7 @@ func (w *KWriter) marshall(_ context.Context, value any, schema string) ([]byte, if w.fmtter == nil { return nil, errors.New("formatter or confluent formatter is not supplied to produce kafka message") } - return w.fmtter.Marshall(marshReq{ + return w.fmtter.marshall(marshReq{ topic: w.topicConfig.Topic, subject: value, schema: schema, @@ -229,7 +229,7 @@ type WriterOption func(*KWriter) func WFormatterOption(fmtter Formatter) WriterOption { return func(w *KWriter) { if fmtter != nil { - w.fmtter = f1{F: fmtter} + w.fmtter = zfmtShim{F: fmtter} } } } diff --git a/writer_test.go b/writer_test.go index 3d95651..ccbe0d7 100644 --- a/writer_test.go +++ b/writer_test.go @@ -36,7 +36,7 @@ func TestWriter_Write(t *testing.T) { type fields struct { Mutex *sync.Mutex Producer KafkaProducer - fmt ultimateFormatter + fmt kFormatter } type args struct { ctx context.Context @@ -61,7 +61,7 @@ func TestWriter_Write(t *testing.T) { { name: "has formatter and producer", fields: fields{ - fmt: f1{&zfmt.StringFormatter{}}, + fmt: zfmtShim{&zfmt.StringFormatter{}}, Producer: p, }, args: args{ctx: context.TODO(), value: "1"}, @@ -70,7 +70,7 @@ func TestWriter_Write(t *testing.T) { { name: "has formatter, producer, incompatible message type", fields: fields{ - fmt: f1{&zfmt.StringFormatter{}}, + fmt: zfmtShim{&zfmt.StringFormatter{}}, Producer: p, }, args: args{ctx: context.TODO(), value: 5}, @@ -164,7 +164,7 @@ func TestWriter_WriteKey(t *testing.T) { w := &KWriter{ producer: tt.fields.Producer, topicConfig: tt.fields.conf, - fmtter: f1{tt.fields.fmt}, + 
fmtter: zfmtShim{tt.fields.fmt}, isClosed: tt.fields.isClosed, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), @@ -202,7 +202,7 @@ func TestWriter_WriteKeyReturnsImmediateError(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{}, isClosed: false, - fmtter: f1{&zfmt.JSONFormatter{}}, + fmtter: zfmtShim{&zfmt.JSONFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -240,7 +240,7 @@ func TestWriter_WritesMetrics(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: f1{&zfmt.StringFormatter{}}, + fmtter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -304,7 +304,7 @@ func TestWriter_WriteSpecialCase(t *testing.T) { t.Run(tt.name, func(t *testing.T) { w := &KWriter{ producer: tt.fields.Producer, - fmtter: f1{tt.fields.fmt}, + fmtter: zfmtShim{tt.fields.fmt}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -348,7 +348,7 @@ func TestWriter_PreWriteLifecycleHookCanAugmentHeaders(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: f1{&zfmt.StringFormatter{}}, + fmtter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -377,7 +377,7 @@ func TestWriter_WithHeadersWriteOptionCanAugmentHeaders(t *testing.T) { wr := &KWriter{ producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, - fmtter: f1{&zfmt.StringFormatter{}}, + fmtter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -435,7 +435,7 @@ func TestWriter_PreWriteLifecycleHookErrorDoesntHaltProcessing(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: f1{&zfmt.StringFormatter{}}, + fmtter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, From fc65e78779a5f3b147a6e0db815a2d90db60e283 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sat, 21 Sep 2024 08:38:08 -0700 Subject: [PATCH 10/34] Privatized some stuff --- client.go | 147 +++++++++++++++++++++++++++++---------- client_test.go | 104 ++++++++++++++++----------- config.go | 4 +- config_test.go | 6 +- formatter.go | 83 +++++++--------------- reader.go | 60 +++++++++++----- reader_test.go | 120 ++++++++++++++++++++++++++------ test/integration_test.go | 2 +- test/worker_test.go | 39 ++++------- testhelper.go | 63 +++++++++++------ work.go | 14 ++++ work_test.go | 24 +++++-- writer.go | 52 ++++++++++---- writer_test.go | 32 +++++---- 14 files changed, 494 insertions(+), 256 deletions(-) diff --git a/client.go b/client.go index 3a5bb9c..aac8b40 100644 --- a/client.go +++ b/client.go @@ -9,6 +9,8 @@ import ( "sync" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2" "go.opentelemetry.io/otel/propagation" "go.opentelemetry.io/otel/trace" ) @@ -27,18 +29,22 @@ const instrumentationName = "github.com/zillow/zkafka" // Client helps instantiate usable readers and writers type Client struct { - mu sync.RWMutex - conf Config - readers map[string]*KReader - writers map[string]*KWriter - logger Logger - lifecycle 
LifecycleHooks
-	groupPrefix string
-	tp          trace.TracerProvider
-	p           propagation.TextMapPropagator
-
-	mmu   sync.Mutex
-	srCls map[string]schemaregistry.Client
+	mu           sync.RWMutex
+	conf         Config
+	readers      map[string]Reader
+	writers      map[string]Writer
+	logger       Logger
+	lifecycle    LifecycleHooks
+	groupPrefix  string
+	tp           trace.TracerProvider
+	p            propagation.TextMapPropagator
+	srClProvider srProvider2
+	//writerProvider writerProvider
+	//readerProvider readerProvider
+
+	srf *schemaRegistryFactory
+	//mmu sync.Mutex
+	//srCls map[string]schemaregistry.Client
 
 	// confluent dependencies
 	producerProvider confluentProducerProvider
@@ -47,15 +53,16 @@ type Client struct {
 
 // NewClient instantiates a kafka client to get readers and writers
 func NewClient(conf Config, opts ...Option) *Client {
+	srf := newSchemaRegistryFactory()
 	c := &Client{
 		conf:             conf,
-		readers:          make(map[string]*KReader),
-		writers:          make(map[string]*KWriter),
-		srCls:            make(map[string]schemaregistry.Client),
+		readers:          make(map[string]Reader),
+		writers:          make(map[string]Writer),
 		logger:           NoopLogger{},
 		producerProvider: defaultConfluentProducerProvider{}.NewProducer,
 		consumerProvider: defaultConfluentConsumerProvider{}.NewConsumer,
+		srClProvider:     srf.create,
 	}
 	for _, opt := range opts {
 		opt(c)
@@ -71,7 +78,12 @@ func (c *Client) Reader(_ context.Context, topicConfig ConsumerTopicConfig, opts
 	}
 	c.mu.RLock()
 	r, exist := c.readers[topicConfig.ClientID]
-	if exist && !r.isClosed {
+	kr, ok := r.(*KReader)
+	// A cached reader only counts as closed if it is a *KReader whose isClosed
+	// flag is set; any other Reader implementation is treated as open.
+	isClosed := ok && kr.isClosed
+	if exist && !isClosed {
 		c.mu.RUnlock()
 		return r, nil
 	}
@@ -80,20 +92,31 @@ func (c *Client) Reader(_ context.Context, topicConfig ConsumerTopicConfig, opts
 	c.mu.Lock()
 	defer c.mu.Unlock()
 	r, exist = c.readers[topicConfig.ClientID]
-	if exist && !r.isClosed {
+	kr, ok = r.(*KReader)
+	isClosed = ok && kr.isClosed
+	if exist && !isClosed {
 		return r, nil
 	}
 
-	reader, err := newReader(c.conf, topicConfig, c.consumerProvider, c.logger, c.groupPrefix, c.schemaCl)
+	formatter, err := getFormatter(formatterArgs{
+		formatter: topicConfig.Formatter,
+		schemaID:  topicConfig.SchemaID,
+		srCfg:     topicConfig.SchemaRegistry,
+		getSR:     c.srClProvider,
+	})
 	if err != nil {
 		return nil, err
 	}
-	// copy settings from client first
-	reader.lifecycle = c.lifecycle
-
-	// overwrite options if given
-	for _, opt := range opts {
-		opt(reader)
+	reader, err := newReader(readerArgs{
+		cfg:              c.conf,
+		cCfg:             topicConfig,
+		consumerProvider: c.consumerProvider,
+		f:                formatter,
+		l:                c.logger,
+		prefix:           c.groupPrefix,
+		hooks:            c.lifecycle,
+		opts:             opts,
+	})
+	if err != nil {
+		return nil, err
 	}
 	c.readers[topicConfig.ClientID] = reader
 	return c.readers[topicConfig.ClientID], nil
@@ -107,7 +130,9 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig, opts
 	}
 	c.mu.RLock()
 	w, exist := c.writers[topicConfig.ClientID]
-	if exist && !w.isClosed {
+	kw, ok := w.(*KWriter)
+	isClosed := ok && kw.isClosed
+	if exist && !isClosed {
 		c.mu.RUnlock()
 		return w, nil
 	}
@@ -116,23 +141,34 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig, opts
 	c.mu.Lock()
 	defer c.mu.Unlock()
 	w, exist = c.writers[topicConfig.ClientID]
-	if exist && !w.isClosed {
+	kw, ok = w.(*KWriter)
+	isClosed = ok && kw.isClosed
+	if exist && !isClosed {
 		return w, nil
 	}
 
-	writer, err := newWriter(c.conf, topicConfig, c.producerProvider, c.schemaCl)
+	formatter, err := getFormatter(formatterArgs{
+		formatter: topicConfig.Formatter,
+		schemaID:  topicConfig.SchemaID,
+		srCfg:     topicConfig.SchemaRegistry,
+		getSR:     c.srClProvider,
+	})
+
+	if err != nil {
+ return nil, err + } + writer, err := newWriter(writerArgs{ + cfg: c.conf, + pCfg: topicConfig, + producerProvider: c.producerProvider, + f: formatter, + l: c.logger, + t: getTracer(c.tp), + p: c.p, + hooks: c.lifecycle, + opts: opts, + }) if err != nil { return nil, err } - // copy settings from client first - writer.logger = c.logger - writer.tracer = getTracer(c.tp) - writer.p = c.p - writer.lifecycle = c.lifecycle - // overwrite options if given - for _, opt := range opts { - opt(writer) - } c.writers[topicConfig.ClientID] = writer return c.writers[topicConfig.ClientID], nil } @@ -157,7 +193,44 @@ func (c *Client) Close() error { return err } -func (c *Client) schemaCl(srConfig SchemaRegistryConfig) (schemaregistry.Client, error) { +type schemaRegistryFactory struct { + mmu sync.Mutex + srCls map[string]schemaregistry.Client +} + +func newSchemaRegistryFactory() *schemaRegistryFactory { + return &schemaRegistryFactory{ + srCls: make(map[string]schemaregistry.Client), + } +} + +func (c *schemaRegistryFactory) create(srConfig SchemaRegistryConfig) (schemaRegistryCl, error) { + cl, err := c.getSchemaClient(srConfig) + if err != nil { + return nil, err + } + + deserConfig := avrov2.NewDeserializerConfig() + deser, err := avrov2.NewDeserializer(cl, serde.ValueSerde, deserConfig) + if err != nil { + return shim{}, fmt.Errorf("failed to create deserializer: %w", err) + } + + serConfig := avrov2.NewSerializerConfig() + serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas + serConfig.NormalizeSchemas = true + + ser, err := avrov2.NewSerializer(cl, serde.ValueSerde, serConfig) + if err != nil { + return shim{}, fmt.Errorf("failed to create serializer: %w", err) + } + return shim{ + ser: ser, + deser: deser, + }, nil +} + +func (c *schemaRegistryFactory) getSchemaClient(srConfig SchemaRegistryConfig) (schemaregistry.Client, error) { url := srConfig.URL if url == "" { return nil, errors.New("no schema registry url provided") diff --git a/client_test.go b/client_test.go index c204f04..a03a482 100644 --- a/client_test.go +++ b/client_test.go @@ -33,8 +33,8 @@ func TestNewClient(t *testing.T) { { name: "empty config", want: &Client{ - readers: make(map[string]*KReader), - writers: make(map[string]*KWriter), + readers: make(map[string]Reader), + writers: make(map[string]Writer), logger: NoopLogger{}, producerProvider: defaultConfluentProducerProvider{}.NewProducer, consumerProvider: defaultConfluentConsumerProvider{}.NewConsumer, @@ -51,8 +51,8 @@ func TestNewClient(t *testing.T) { conf: Config{ BootstrapServers: []string{"test"}, }, - readers: make(map[string]*KReader), - writers: make(map[string]*KWriter), + readers: make(map[string]Reader), + writers: make(map[string]Writer), logger: NoopLogger{}, producerProvider: defaultConfluentProducerProvider{}.NewProducer, consumerProvider: defaultConfluentConsumerProvider{}.NewConsumer, @@ -124,8 +124,8 @@ func TestClient_WithOptions(t *testing.T) { func TestClient_Reader(t *testing.T) { type fields struct { conf Config - readers map[string]*KReader - writers map[string]*KWriter + readers map[string]Reader + writers map[string]Writer logger Logger producerProvider confluentProducerProvider consumerProvider confluentConsumerProvider @@ -146,7 +146,7 @@ func TestClient_Reader(t *testing.T) { name: "create new KReader with overridden Brokers, error from consumer provider", fields: fields{ consumerProvider: mockConfluentConsumerProvider{err: true}.NewConsumer, - readers: make(map[string]*KReader), + readers: make(map[string]Reader), }, 
args: args{ topicConfig: ConsumerTopicConfig{ @@ -162,7 +162,7 @@ func TestClient_Reader(t *testing.T) { name: "create new KReader with bad formatter", fields: fields{ consumerProvider: mockConfluentConsumerProvider{err: false}.NewConsumer, - readers: make(map[string]*KReader), + readers: make(map[string]Reader), }, args: args{ topicConfig: ConsumerTopicConfig{ @@ -178,8 +178,8 @@ func TestClient_Reader(t *testing.T) { { name: "create new KReader for closed KReader", fields: fields{ - readers: map[string]*KReader{ - "test-config": {isClosed: true}, + readers: map[string]Reader{ + "test-config": &KReader{isClosed: true}, }, consumerProvider: mockConfluentConsumerProvider{c: MockKafkaConsumer{ID: "stew"}}.NewConsumer, logger: NoopLogger{}, @@ -210,8 +210,8 @@ func TestClient_Reader(t *testing.T) { { name: "create new KReader for closed KReader with default overrides", fields: fields{ - readers: map[string]*KReader{ - "test-config": {isClosed: true}, + readers: map[string]Reader{ + "test-config": &KReader{isClosed: true}, }, consumerProvider: mockConfluentConsumerProvider{c: MockKafkaConsumer{ID: "stew"}}.NewConsumer, logger: NoopLogger{}, @@ -250,8 +250,8 @@ func TestClient_Reader(t *testing.T) { { name: "get from cache", fields: fields{ - readers: map[string]*KReader{ - "test-config": {}, + readers: map[string]Reader{ + "test-config": &KReader{}, }, }, args: args{ @@ -303,8 +303,8 @@ func TestClient_Reader(t *testing.T) { func TestClient_Writer(t *testing.T) { type fields struct { conf Config - readers map[string]*KReader - writers map[string]*KWriter + readers map[string]Reader + writers map[string]Writer logger Logger producerProvider confluentProducerProvider } @@ -324,7 +324,7 @@ func TestClient_Writer(t *testing.T) { name: "create new KWriter with overridden Brokers, error from producer provider", fields: fields{ producerProvider: mockConfluentProducerProvider{err: true}.NewProducer, - writers: make(map[string]*KWriter), + writers: make(map[string]Writer), conf: Config{ SaslUsername: ptr("test-user"), SaslPassword: ptr("test-password"), @@ -342,8 +342,8 @@ func TestClient_Writer(t *testing.T) { { name: "create new KWriter for closed writer", fields: fields{ - writers: map[string]*KWriter{ - "test-id": {isClosed: true}, + writers: map[string]Writer{ + "test-id": &KWriter{isClosed: true}, }, producerProvider: mockConfluentProducerProvider{}.NewProducer, logger: NoopLogger{}, @@ -371,8 +371,8 @@ func TestClient_Writer(t *testing.T) { { name: "create new KWriter for closed writer with default overrides", fields: fields{ - writers: map[string]*KWriter{ - "test-id": {isClosed: true}, + writers: map[string]Writer{ + "test-id": &KWriter{isClosed: true}, }, producerProvider: mockConfluentProducerProvider{}.NewProducer, logger: NoopLogger{}, @@ -407,8 +407,8 @@ func TestClient_Writer(t *testing.T) { { name: "get from cache", fields: fields{ - writers: map[string]*KWriter{ - "test-id": {}, + writers: map[string]Writer{ + "test-id": &KWriter{}, }, }, args: args{ @@ -453,20 +453,32 @@ func TestClient_Close(t *testing.T) { type fields struct { Mutex *sync.Mutex conf Config - readers map[string]*KReader - writers map[string]*KWriter + readers map[string]Reader + writers map[string]Writer } m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r1, err := newReader(Config{}, ConsumerTopicConfig{ - Formatter: zfmt.StringFmt, - }, m, &NoopLogger{}, "", nil) + r1, err := newReader(readerArgs{ + cfg: Config{}, + cCfg: ConsumerTopicConfig{ + Formatter: zfmt.StringFmt, + }, + consumerProvider: 
m, + f: zfmtShim{F: &zfmt.StringFormatter{}}, + l: &NoopLogger{}, + }) require.NoError(t, err) - r2, err := newReader(Config{}, ConsumerTopicConfig{ - Formatter: zfmt.StringFmt, - }, m, &NoopLogger{}, "", nil) + r2, err := newReader(readerArgs{ + cfg: Config{}, + cCfg: ConsumerTopicConfig{ + Formatter: zfmt.StringFmt, + }, + consumerProvider: m, + f: zfmtShim{F: &zfmt.StringFormatter{}}, + l: &NoopLogger{}, + }) require.NoError(t, err) tests := []struct { name string @@ -481,13 +493,13 @@ func TestClient_Close(t *testing.T) { name: "with readers/writers => no error", wantErr: true, fields: fields{ - readers: map[string]*KReader{ + readers: map[string]Reader{ "r1": r1, "r2": r2, }, - writers: map[string]*KWriter{ - "w1": {producer: p}, - "w2": {producer: p}, + writers: map[string]Writer{ + "w1": &KWriter{producer: p}, + "w2": &KWriter{producer: p}, }, }, }, @@ -507,10 +519,14 @@ func TestClient_Close(t *testing.T) { require.NoError(t, err) } for _, w := range c.writers { - require.True(t, w.isClosed, "clients writer should be closed") + kw, ok := w.(*KWriter) + require.True(t, ok, "Expected writer to be KWriter") + require.True(t, kw.isClosed, "clients writer should be closed") } - for _, reader := range c.readers { - require.True(t, reader.isClosed, "clients reader should be closed") + for _, r := range c.readers { + kr, ok := r.(*KReader) + require.True(t, ok, "Expected reader to be KReader") + require.True(t, kr.isClosed, "clients reader should be closed") } }) } @@ -588,7 +604,11 @@ func Test_getFormatter_Consumer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID, SchemaRegistryConfig{}, nil) + args := formatterArgs{ + formatter: tt.args.topicConfig.Formatter, + schemaID: tt.args.topicConfig.SchemaID, + } + got, err := getFormatter(args) if tt.wantErr { require.Error(t, err) } else { @@ -625,7 +645,11 @@ func Test_getFormatter_Producer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got, err := getFormatter(tt.args.topicConfig.Formatter, tt.args.topicConfig.SchemaID, SchemaRegistryConfig{}, nil) + args := formatterArgs{ + formatter: tt.args.topicConfig.Formatter, + schemaID: tt.args.topicConfig.SchemaID, + } + got, err := getFormatter(args) if tt.wantErr { require.Error(t, err) } else { diff --git a/config.go b/config.go index 090b5da..fd093f6 100644 --- a/config.go +++ b/config.go @@ -80,7 +80,7 @@ type ConsumerTopicConfig struct { SchemaRegistry SchemaRegistryConfig - // SchemaID defines the schema registered with Confluent Schema Registry + // SchemaID defines the schema registered with Confluent schema Registry // Default value is 0, and it implies that both Writer and Reader do not care about schema validation // and should encode/decode the message based on data type provided. // Currently, this only works with SchematizedAvroFormatter @@ -176,7 +176,7 @@ type ProducerTopicConfig struct { SchemaRegistry SchemaRegistryConfig - // SchemaID defines the schema registered with Confluent Schema Registry + // SchemaID defines the schema registered with Confluent schema Registry // Default value is 0, and it implies that both Writer and Reader do not care about schema validation // and should encode/decode the message based on data type provided. 
// Currently, this only works with SchematizedAvroFormatter diff --git a/config_test.go b/config_test.go index f30f255..12cf25c 100644 --- a/config_test.go +++ b/config_test.go @@ -28,7 +28,7 @@ func Test_getDefaultConsumerTopicConfig(t *testing.T) { wantErr: true, }, { - name: "missing required field (Topic) => error", + name: "missing required field (topic) => error", args: args{conf: &ConsumerTopicConfig{ GroupID: "test_group", ClientID: "test", @@ -36,7 +36,7 @@ func Test_getDefaultConsumerTopicConfig(t *testing.T) { wantErr: true, }, { - name: "missing required non empty fields (Topic and or Topics) => error", + name: "missing required non empty fields (topic and or Topics) => error", args: args{conf: &ConsumerTopicConfig{ GroupID: "test_group", ClientID: "test", @@ -126,7 +126,7 @@ func Test_getDefaultProducerTopicConfig(t *testing.T) { wantErr bool }{ { - name: "missing required field (Topic) => error", + name: "missing required field (topic) => error", args: args{conf: &ProducerTopicConfig{ ClientID: "test", }}, diff --git a/formatter.go b/formatter.go index f74fb0b..4ee1c5d 100644 --- a/formatter.go +++ b/formatter.go @@ -5,7 +5,6 @@ import ( "fmt" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" - "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2" "github.com/zillow/zfmt" ) @@ -24,15 +23,12 @@ type Formatter interface { Unmarshal(b []byte, v any) error } -//type confluentFormatter interface { -// marshall(topic string, v any, schema string) ([]byte, error) -// unmarshal(topic string, b []byte, v any) error -//} - type marshReq struct { - topic string + topic string + // subject is the data to be marshalled subject any - schema string + // schema + schema string } type unmarshReq struct { @@ -61,14 +57,26 @@ func (f zfmtShim) unmarshal(req unmarshReq) error { return f.F.Unmarshal(req.data, req.target) } -func getFormatter(formatter zfmt.FormatterType, schemaID int, srCfg SchemaRegistryConfig, getSR srProvider) (kFormatter, error) { +type formatterArgs struct { + formatter zfmt.FormatterType + schemaID int + srCfg SchemaRegistryConfig + getSR srProvider2 +} + +func getFormatter(args formatterArgs) (kFormatter, error) { + formatter := args.formatter + schemaID := args.schemaID + switch formatter { case AvroConfluentFmt: - cl, err := getSR(srCfg) + srCfg := args.srCfg + getSR := args.getSR + scl, err := getSR(srCfg) if err != nil { return nil, err } - cf, err := newAvroSchemaRegistryFormatter(cl, srCfg) + cf, err := NewAvroSchemaRegistryFormatter(scl) return cf, err case CustomFmt: return &errFormatter{}, nil @@ -95,53 +103,14 @@ func (f errFormatter) unmarshal(req unmarshReq) error { return errMissingFmtter } -//var _ confluentFormatter = (*avroSchemaRegistryFormatter)(nil) - type avroSchemaRegistryFormatter struct { schemaRegistryCl schemaRegistryCl - //deser *avrov2.Deserializer - //ser *avrov2.Serializer - f zfmt.SchematizedAvroFormatter -} - -// func newAvroConfig(srConfig SchemaRegistryConfig) (*avro.SerializerConfig, *avro.DeserializerConfig, error) { -// url := srConfig.URL -// if url == "" { -// return nil, nil, errors.New("no schema registry url provided") -// } -// deserConfig := avrov2.NewDeserializerConfig() -// -// serConfig := avrov2.NewSerializerConfig() -// serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas -// serConfig.NormalizeSchemas = true -// -// } -func newAvroSchemaRegistryFormatter(cl schemaregistry.Client, srConfig 
SchemaRegistryConfig) (avroSchemaRegistryFormatter, error) { - url := srConfig.URL - if url == "" { - return avroSchemaRegistryFormatter{}, errors.New("no schema registry url provided") - } - - deserConfig := avrov2.NewDeserializerConfig() - deser, err := avrov2.NewDeserializer(cl, serde.ValueSerde, deserConfig) - if err != nil { - return avroSchemaRegistryFormatter{}, fmt.Errorf("failed to create deserializer: %w", err) - } - - serConfig := avrov2.NewSerializerConfig() - serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas - serConfig.NormalizeSchemas = true + f zfmt.SchematizedAvroFormatter +} - ser, err := avrov2.NewSerializer(cl, serde.ValueSerde, serConfig) - if err != nil { - return avroSchemaRegistryFormatter{}, fmt.Errorf("failed to create serializer: %w", err) - } - shimcl := shim{ - ser: ser, - deser: deser, - } +func NewAvroSchemaRegistryFormatter(shimCl schemaRegistryCl) (avroSchemaRegistryFormatter, error) { return avroSchemaRegistryFormatter{ - schemaRegistryCl: shimcl, + schemaRegistryCl: shimCl, }, nil } @@ -162,7 +131,7 @@ func (f avroSchemaRegistryFormatter) marshall(req marshReq) ([]byte, error) { } func (f avroSchemaRegistryFormatter) unmarshal(req unmarshReq) error { - err := f.schemaRegistryCl.DeserializeInto(req.topic, req.data, &req.target) + err := f.schemaRegistryCl.Deserialize(req.topic, req.data, &req.target) if err != nil { return fmt.Errorf("failed to deserialize to confluent schema registry avro type: %w", err) } @@ -171,7 +140,7 @@ func (f avroSchemaRegistryFormatter) unmarshal(req unmarshReq) error { type schemaRegistryCl interface { GetID(topic string, avroSchema string) (int, error) - DeserializeInto(topic string, value []byte, target any) error + Deserialize(topic string, value []byte, target any) error } var _ schemaRegistryCl = (*shim)(nil) @@ -185,6 +154,6 @@ func (s shim) GetID(topic string, avroSchema string) (int, error) { return s.ser.GetID(topic, nil, &schemaregistry.SchemaInfo{Schema: avroSchema}) } -func (s shim) DeserializeInto(topic string, value []byte, target any) error { +func (s shim) Deserialize(topic string, value []byte, target any) error { return s.deser.DeserializeInto(topic, value, target) } diff --git a/reader.go b/reader.go index bb08d5c..dcf9e7b 100644 --- a/reader.go +++ b/reader.go @@ -11,7 +11,6 @@ import ( "time" "github.com/confluentinc/confluent-kafka-go/v2/kafka" - "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" ) //// go:generate mockgen -destination=./mocks/mock_metrics.go -source=reader.go @@ -52,28 +51,52 @@ type KReader struct { tCommitMgr *topicCommitMgr } -type srProvider func(_ SchemaRegistryConfig) (schemaregistry.Client, error) +type srProvider2 func(_ SchemaRegistryConfig) (schemaRegistryCl, error) + +type readerArgs struct { + cfg Config + cCfg ConsumerTopicConfig + consumerProvider confluentConsumerProvider + f kFormatter + l Logger + prefix string + hooks LifecycleHooks + opts []ReaderOption +} + +//type readerProvider func(args readerArgs) (Reader, error) // newReader makes a new reader based on the configurations -func newReader(conf Config, topicConfig ConsumerTopicConfig, provider confluentConsumerProvider, logger Logger, prefix string, srProvider srProvider) (*KReader, error) { +func newReader(args readerArgs) (*KReader, error) { + conf := args.cfg + topicConfig := args.cCfg + prefix := args.prefix + provider := args.consumerProvider + formatter := args.f + logger := args.l + confluentConfig := makeConsumerConfig(conf, topicConfig, prefix) consumer, err := 
provider(confluentConfig) if err != nil { return nil, err } - formatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry, srProvider) - if err != nil { - return nil, err - } - - return &KReader{ + r := &KReader{ consumer: consumer, - fmtter: formatter, topicConfig: topicConfig, + fmtter: formatter, logger: logger, + lifecycle: args.hooks, tCommitMgr: newTopicCommitMgr(), - }, nil + } + s := ReaderSettings{} + for _, opt := range args.opts { + opt(&s) + } + if s.fmtter != nil { + r.fmtter = s.fmtter + } + return r, nil } // Read consumes a single message at a time. Blocks until a message is returned or some @@ -92,8 +115,9 @@ func (r *KReader) Read(ctx context.Context) (*Message, error) { } kmsg, err := r.consumer.ReadMessage(time.Duration(*r.topicConfig.ReadTimeoutMillis) * time.Millisecond) if err != nil { - switch v := err.(type) { - case kafka.Error: + var v kafka.Error + switch { + case errors.As(err, &v): // timeouts occur (because the assigned partitions aren't being written to, lack of activity, etc.). We'll // log them for debugging purposes if v.Code() == kafka.ErrTimedOut { @@ -289,14 +313,18 @@ func getTopicName(topicName *string) string { return topic } +type ReaderSettings struct { + fmtter kFormatter +} + // ReaderOption is a function that modify the KReader configurations -type ReaderOption func(*KReader) +type ReaderOption func(*ReaderSettings) // RFormatterOption sets the formatter for this reader func RFormatterOption(fmtter Formatter) ReaderOption { - return func(r *KReader) { + return func(s *ReaderSettings) { if fmtter != nil { - r.fmtter = zfmtShim{F: fmtter} + s.fmtter = zfmtShim{F: fmtter} } } } diff --git a/reader_test.go b/reader_test.go index f38e01f..a1aa7a7 100644 --- a/reader_test.go +++ b/reader_test.go @@ -31,7 +31,14 @@ func TestReader_Read_NilReturn(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, _ := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + args := readerArgs{ + cfg: Config{}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) + require.NoError(t, err) got, err := r.Read(context.TODO()) require.NoError(t, err) @@ -59,7 +66,17 @@ func TestReader_Read(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + f, err := getFormatter(formatterArgs{formatter: topicConfig.Formatter}) + require.NoError(t, err) + + args := readerArgs{ + cfg: Config{}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + f: f, + } + r, err := newReader(args) require.NoError(t, err) got, err := r.Read(context.TODO()) @@ -96,7 +113,13 @@ func TestReader_Read_Error(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + args := readerArgs{ + cfg: Config{}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) require.NoError(t, err) got, err := r.Read(context.TODO()) @@ -129,7 +152,13 @@ func TestReader_Read_TimeoutError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + args := readerArgs{ + cfg: Config{}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) require.NoError(t, err) got, err := r.Read(context.TODO()) @@ -149,7 +178,13 @@ 
func TestReader_Read_SubscriberError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + args := readerArgs{ + cfg: Config{}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) require.NoError(t, err) _, err = r.Read(context.TODO()) @@ -170,7 +205,13 @@ func TestReader_Read_CloseError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + args := readerArgs{ + cfg: Config{}, + cCfg: topicConfig, + consumerProvider: m, + l: &l, + } + r, err := newReader(args) require.NoError(t, err) err = r.Close() @@ -191,7 +232,13 @@ func TestReader_ReadWhenConnectionIsClosed(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &NoopLogger{}, "", nil) + args := readerArgs{ + cfg: Config{}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) require.NoError(t, err) err = r.Close() @@ -221,16 +268,16 @@ func Test_newReader(t *testing.T) { }, wantErr: false, }, - { - name: "invalid formatter", - args: args{ - consumeProvider: defaultConfluentConsumerProvider{}.NewConsumer, - topicConfig: ConsumerTopicConfig{ - Formatter: zfmt.FormatterType("invalid_fmt"), - }, - }, - wantErr: true, - }, + //{ + // name: "invalid formatter", + // args: args{ + // consumeProvider: defaultConfluentConsumerProvider{}.NewConsumer, + // topicConfig: ConsumerTopicConfig{ + // Formatter: zfmt.FormatterType("invalid_fmt"), + // }, + // }, + // wantErr: true, + //}, { name: "valid formatter but has error when creating NewConsumer", args: args{ @@ -252,7 +299,14 @@ func Test_newReader(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - _, err := newReader(tt.args.conf, tt.args.topicConfig, tt.args.consumeProvider, &NoopLogger{}, "", nil) + args := readerArgs{ + cfg: tt.args.conf, + cCfg: tt.args.topicConfig, + consumerProvider: tt.args.consumeProvider, + l: &NoopLogger{}, + } + _, err := newReader(args) + if tt.wantErr { require.Error(t, err) } else { @@ -284,7 +338,13 @@ func Test_ProcessMessage(t *testing.T) { m := mockConfluentConsumerProvider{ c: mock_confluent.NewMockKafkaConsumer(ctrl), }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + args := readerArgs{ + cfg: Config{}, + cCfg: topicConfig, + consumerProvider: m, + l: &NoopLogger{}, + } + r, err := newReader(args) require.NoError(t, err) got := r.mapMessage(context.Background(), dupMessage) @@ -317,7 +377,13 @@ func Test_ProcessMultipleMessagesFromDifferentTopics_UpdatesInternalStateProperl m := mockConfluentConsumerProvider{ c: mock_confluent.NewMockKafkaConsumer(ctrl), }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + + args := readerArgs{ + cCfg: topicConfig, + consumerProvider: m, + l: &l, + } + r, err := newReader(args) require.NoError(t, err) for _, msg := range msgs { @@ -359,7 +425,12 @@ func Test_ProcessMessage_StoreOffsetError(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + args := readerArgs{ + cCfg: topicConfig, + consumerProvider: m, + l: &l, + } + r, err := newReader(args) require.NoError(t, err) mgr := newTopicCommitMgr() @@ -407,7 +478,12 @@ func Test_ProcessMessage_SetError(t *testing.T) { m := 
mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - r, err := newReader(Config{}, topicConfig, m, &l, "", nil) + args := readerArgs{ + cCfg: topicConfig, + consumerProvider: m, + l: &l, + } + r, err := newReader(args) require.NoError(t, err) mgr := newTopicCommitMgr() diff --git a/test/integration_test.go b/test/integration_test.go index 8301a67..227cc26 100644 --- a/test/integration_test.go +++ b/test/integration_test.go @@ -409,7 +409,7 @@ func Test_WithMultipleTopics_RebalanceDoesntCauseDuplicateMessages(t *testing.T) Val: "sdfds", } - t.Log("Begin writing to Test Topic") + t.Log("Begin writing to Test topic") // write N messages to topic1 msgCount := tc.messageCount for i := 0; i < msgCount; i++ { diff --git a/test/worker_test.go b/test/worker_test.go index cab8e52..d1334c9 100644 --- a/test/worker_test.go +++ b/test/worker_test.go @@ -82,8 +82,7 @@ func TestWork_Run_FailsWithLogsWhenGotNilReader(t *testing.T) { l.EXPECT().Warnw(gomock.Any(), "Kafka worker read message failed", "error", gomock.Any(), "topics", gomock.Any()).Times(1) l.EXPECT().Debugw(gomock.Any(), gomock.Any()).AnyTimes() - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(nil, nil) + kcp := zkafka.FakeClient{R: nil} ctx, cancel := context.WithCancel(ctx) defer cancel() @@ -111,8 +110,7 @@ func TestWork_Run_FailsWithLogsForReadError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).Times(1).Return(nil, errors.New("error occurred during read")) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} ctx, cancel := context.WithCancel(ctx) defer cancel() @@ -139,8 +137,7 @@ func TestWork_Run_CircuitBreakerOpensOnReadError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).AnyTimes().Return(nil, errors.New("error occurred during read")) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} kwf := zkafka.NewWorkFactory(kcp, zkafka.WithLogger(l)) @@ -192,8 +189,7 @@ func TestWork_Run_CircuitBreaksOnProcessError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).AnyTimes().Return(r, nil) + kcp := zkafka.FakeClient{R: r} kproc := &fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -251,8 +247,7 @@ func TestWork_Run_DoNotSkipCircuitBreak(t *testing.T) { r.EXPECT().Read(gomock.Any()).Return(failureMessage, nil).AnyTimes() - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).AnyTimes().Return(r, nil) + kcp := zkafka.FakeClient{R: r} kproc := &fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -314,8 +309,7 @@ func TestWork_Run_DoSkipCircuitBreak(t *testing.T) { r.EXPECT().Read(gomock.Any()).Return(failureMessage, nil).AnyTimes() - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).AnyTimes().Return(r, nil) + kcp := zkafka.FakeClient{R: r} kproc := fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -376,8 +370,7 @@ func TestWork_Run_CircuitBreaksOnProcessPanicInsideProcessorGoRoutine(t *testing r := zkafka_mocks.NewMockReader(ctrl) 
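	// (FakeClient, added to testhelper.go later in this patch, stands in for the
	// generated MockClientProvider, so these tests no longer set Reader expectations.)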
r.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).AnyTimes().Return(r, nil) + kcp := zkafka.FakeClient{R: r} kproc := &fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -442,8 +435,7 @@ func TestWork_Run_DisabledCircuitBreakerContinueReadError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).MinTimes(4).Return(nil, errors.New("error occurred on read")) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} kwf := zkafka.NewWorkFactory(kcp, zkafka.WithLogger(l)) @@ -1162,8 +1154,7 @@ func TestWork_Run_OnDoneCallbackCalledOnProcessorError(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} kwf := zkafka.NewWorkFactory(kcp, zkafka.WithLogger(l)) @@ -1224,8 +1215,7 @@ func TestWork_Run_WritesMetrics(t *testing.T) { r := zkafka_mocks.NewMockReader(ctrl) r.EXPECT().Read(gomock.Any()).MinTimes(1).Return(msg, nil) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} lhMtx := sync.Mutex{} lhState := FakeLifecycleState{ @@ -1286,8 +1276,7 @@ func TestWork_LifecycleHooksCalledForEachItem_Reader(t *testing.T) { r.EXPECT().Read(gomock.Any()).AnyTimes().Return(nil, nil), ) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} lhMtx := sync.Mutex{} lhState := FakeLifecycleState{ @@ -1349,8 +1338,7 @@ func TestWork_LifecycleHooksPostReadCanUpdateContext(t *testing.T) { r.EXPECT().Read(gomock.Any()).AnyTimes().Return(nil, nil), ) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} lhMtx := sync.Mutex{} lhState := FakeLifecycleState{ @@ -1410,8 +1398,7 @@ func TestWork_LifecycleHooksPostReadErrorDoesntHaltProcessing(t *testing.T) { r.EXPECT().Read(gomock.Any()).AnyTimes().Return(nil, nil), ) - kcp := zkafka_mocks.NewMockClientProvider(ctrl) - kcp.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(r, nil) + kcp := zkafka.FakeClient{R: r} lhMtx := sync.Mutex{} lhState := FakeLifecycleState{ diff --git a/testhelper.go b/testhelper.go index d17e228..1ce4b3e 100644 --- a/testhelper.go +++ b/testhelper.go @@ -45,47 +45,66 @@ type FakeMessage struct { } // GetMsgFromFake allows the construction of a Message object (allowing the specification of some private fields). 
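// An illustrative (hypothetical) call from a test:
//
//	key := "msg-key"
//	msg := GetMsgFromFake(&FakeMessage{
//		Key:       &key,
//		ValueData: map[string]string{"hello": "world"},
//		Fmt:       &zfmt.JSONFormatter{},
//	})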
-func GetMsgFromFake(msg *FakeMessage) *Message { - if msg == nil { +func GetMsgFromFake(input *FakeMessage) *Message { + if input == nil { return nil } key := "" - if msg.Key != nil { - key = *msg.Key + if input.Key != nil { + key = *input.Key } timeStamp := time.Now() - if !msg.TimeStamp.IsZero() { - timeStamp = msg.TimeStamp + if !input.TimeStamp.IsZero() { + timeStamp = input.TimeStamp } doneFunc := func(ctx context.Context) {} - if msg.DoneFunc != nil { - doneFunc = msg.DoneFunc + if input.DoneFunc != nil { + doneFunc = input.DoneFunc } var val []byte - if msg.Value != nil { - val = msg.Value + if input.Value != nil { + val = input.Value } - if msg.ValueData != nil { + if input.ValueData != nil { //nolint:errcheck // To simplify this helper function's api, we'll suppress marshalling errors. - val, _ = msg.Fmt.Marshall(msg.ValueData) + val, _ = input.Fmt.Marshall(input.ValueData) } return &Message{ Key: key, - isKeyNil: msg.Key == nil, - Headers: msg.Headers, - Offset: msg.Offset, - Partition: msg.Partition, - Topic: msg.Topic, - GroupID: msg.GroupID, + isKeyNil: input.Key == nil, + Headers: input.Headers, + Offset: input.Offset, + Partition: input.Partition, + Topic: input.Topic, + GroupID: input.GroupID, TimeStamp: timeStamp, value: val, topicPartition: kafka.TopicPartition{ - Topic: &msg.Topic, - Partition: msg.Partition, - Offset: kafka.Offset(msg.Offset), + Topic: &input.Topic, + Partition: input.Partition, + Offset: kafka.Offset(input.Offset), }, - fmt: zfmtShim{F: msg.Fmt}, + fmt: zfmtShim{F: input.Fmt}, doneFunc: doneFunc, doneOnce: sync.Once{}, } } + +var _ ClientProvider = (*FakeClient)(nil) + +type FakeClient struct { + R Reader + W Writer +} + +func (f FakeClient) Reader(ctx context.Context, topicConfig ConsumerTopicConfig, opts ...ReaderOption) (Reader, error) { + return f.R, nil +} + +func (f FakeClient) Writer(ctx context.Context, topicConfig ProducerTopicConfig, opts ...WriterOption) (Writer, error) { + return f.W, nil +} + +func (f FakeClient) Close() error { + return nil +} diff --git a/work.go b/work.go index 1a5de2a..3ac17e9 100644 --- a/work.go +++ b/work.go @@ -645,6 +645,10 @@ func NewWorkFactory( return factory } +func (f WorkFactory) CreateWithFunc(topicConfig ConsumerTopicConfig, p func(_ context.Context, msg *Message) error, options ...WorkOption) *Work { + return f.Create(topicConfig, processorAdapter{p: p}, options...) +} + // Create creates a new Work instance. 
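// A bare function can also be supplied instead of a processor implementation via
// CreateWithFunc above; an illustrative call:
// w := f.CreateWithFunc(cfg, func(_ context.Context, m *Message) error { return nil })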
func (f WorkFactory) Create(topicConfig ConsumerTopicConfig, processor processor, options ...WorkOption) *Work { work := &Work{ @@ -825,3 +829,13 @@ func (c *delayCalculator) remaining(targetDelay time.Duration, msgTimeStamp time // this piece makes sure the return isn't possibly greater than the target return min(targetDelay-observedDelay, targetDelay) } + +var _ processor = (*processorAdapter)(nil) + +type processorAdapter struct { + p func(_ context.Context, msg *Message) error +} + +func (a processorAdapter) Process(ctx context.Context, message *Message) error { + return a.p(ctx, message) +} diff --git a/work_test.go b/work_test.go index 9eb7024..f6ac8cd 100644 --- a/work_test.go +++ b/work_test.go @@ -783,14 +783,17 @@ func (m *timeDelayProcessor) Process(_ context.Context, message *Message) error return nil } -type mockClientProvider struct{} +type mockClientProvider struct { + r Reader + w Writer +} -func (mockClientProvider) Reader(ctx context.Context, topicConfig ConsumerTopicConfig, opts ...ReaderOption) (Reader, error) { - return nil, nil +func (m mockClientProvider) Reader(ctx context.Context, topicConfig ConsumerTopicConfig, opts ...ReaderOption) (Reader, error) { + return m.r, nil } -func (mockClientProvider) Writer(ctx context.Context, topicConfig ProducerTopicConfig, opts ...WriterOption) (Writer, error) { - return nil, nil +func (m mockClientProvider) Writer(ctx context.Context, topicConfig ProducerTopicConfig, opts ...WriterOption) (Writer, error) { + return m.w, nil } func (mockClientProvider) Close() error { @@ -815,3 +818,14 @@ type workSettings struct { func (w *workSettings) ShutdownSig() <-chan struct{} { return w.shutdownSig } + +type fakeProcessor struct { + process func(context.Context, *Message) error +} + +func (p *fakeProcessor) Process(ctx context.Context, msg *Message) error { + if p.process != nil { + return p.process(ctx, msg) + } + return nil +} diff --git a/writer.go b/writer.go index 01a19ec..8569edc 100644 --- a/writer.go +++ b/writer.go @@ -59,23 +59,47 @@ type keyValuePair struct { value any } -func newWriter(conf Config, topicConfig ProducerTopicConfig, producer confluentProducerProvider, srProvider srProvider) (*KWriter, error) { +type writerArgs struct { + cfg Config + pCfg ProducerTopicConfig + producerProvider confluentProducerProvider + f kFormatter + l Logger + t trace.Tracer + p propagation.TextMapPropagator + hooks LifecycleHooks + opts []WriterOption +} + +func newWriter(args writerArgs) (*KWriter, error) { + conf := args.cfg + topicConfig := args.pCfg + producer := args.producerProvider + formatter := args.f + confluentConfig := makeProducerConfig(conf, topicConfig) p, err := producer(confluentConfig) if err != nil { return nil, err } - formatter, err := getFormatter(topicConfig.Formatter, topicConfig.SchemaID, topicConfig.SchemaRegistry, srProvider) - if err != nil { - return nil, err - } - return &KWriter{ + w := &KWriter{ producer: p, - fmtter: formatter, topicConfig: topicConfig, - logger: NoopLogger{}, - }, nil + fmtter: formatter, + logger: args.l, + tracer: args.t, + p: args.p, + lifecycle: args.hooks, + } + s := WriterSettings{} + for _, opt := range args.opts { + opt(&s) + } + if s.fmtter != nil { + w.fmtter = s.fmtter + } + return w, nil } // Write sends messages to kafka with message key set as nil. 
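One design note on the constructor changes above: caller options are now applied to a small settings struct that the constructor consults exactly once, instead of mutating the partially-built KWriter/KReader. This keeps the option type stable while the reader/writer internals change. A minimal, self-contained sketch of that pattern (names invented for illustration, not this package's API):

package main

import "fmt"

// settings collects everything an option may override before construction.
type settings struct{ formatter string }

// option mutates settings, never the constructed writer.
type option func(*settings)

func withFormatter(f string) option {
	return func(s *settings) { s.formatter = f }
}

type writer struct{ formatter string }

// newWriter applies defaults first, then options, then builds the writer once.
func newWriter(opts ...option) *writer {
	s := settings{formatter: "json"}
	for _, opt := range opts {
		opt(&s)
	}
	return &writer{formatter: s.formatter}
}

func main() {
	w := newWriter(withFormatter("avro_schema_registry"))
	fmt.Println(w.formatter) // prints "avro_schema_registry"
}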
@@ -222,14 +246,18 @@ func (w *KWriter) Close() {
 	w.isClosed = true
 }
 
+type WriterSettings struct {
+	fmtter kFormatter
+}
+
 // WriterOption is a function that modify the writer configurations
-type WriterOption func(*KWriter)
+type WriterOption func(*WriterSettings)
 
 // WFormatterOption sets the formatter for this writer
 func WFormatterOption(fmtter Formatter) WriterOption {
-	return func(w *KWriter) {
+	return func(s *WriterSettings) {
 		if fmtter != nil {
-			w.fmtter = zfmtShim{F: fmtter}
+			s.fmtter = zfmtShim{F: fmtter}
 		}
 	}
 }
diff --git a/writer_test.go b/writer_test.go
index ccbe0d7..798d949 100644
--- a/writer_test.go
+++ b/writer_test.go
@@ -513,16 +513,16 @@ func Test_newWriter(t *testing.T) {
 			},
 			wantErr: false,
 		},
-		{
-			name: "invalid formatter",
-			args: args{
-				producerP: defaultConfluentProducerProvider{}.NewProducer,
-				topicConfig: ProducerTopicConfig{
-					Formatter: zfmt.FormatterType("invalid_fmt"),
-				},
-			},
-			wantErr: true,
-		},
+		//{
+		//	name: "invalid formatter",
+		//	args: args{
+		//		producerP: defaultConfluentProducerProvider{}.NewProducer,
+		//		topicConfig: ProducerTopicConfig{
+		//			Formatter: zfmt.FormatterType("invalid_fmt"),
+		//		},
+		//	},
+		//	wantErr: true,
+		//},
 		{
 			name: "valid formatter but has error from confluent producer constructor",
 			args: args{
@@ -544,7 +544,12 @@
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			recoverThenFail(t)
-			w, err := newWriter(tt.args.conf, tt.args.topicConfig, tt.args.producerP, nil)
+			args := writerArgs{
+				cfg:              tt.args.conf,
+				pCfg:             tt.args.topicConfig,
+				producerProvider: tt.args.producerP,
+			}
+			w, err := newWriter(args)
 			if tt.wantErr {
 				require.Error(t, err, "expected error for newWriter()")
 			} else {
@@ -560,8 +565,9 @@ func TestWriter_WithOptions(t *testing.T) {
 	w := &KWriter{}
 	require.Nil(t, w.fmtter, "expected nil formatter")
 
-	WFormatterOption(&zfmt.StringFormatter{})(w)
-	require.NotNil(t, w.fmtter, "expected non-nil formatter")
+	settings := WriterSettings{}
+	WFormatterOption(&zfmt.StringFormatter{})(&settings)
+	require.NotNil(t, settings.fmtter, "expected non-nil formatter")
 }
 
 func Test_writeAttributeCarrier_Set(t *testing.T) {

From 3963574f7765ed70680aec97c4d778d0b2175fa9 Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Sat, 21 Sep 2024 08:46:15 -0700
Subject: [PATCH 11/34] Added some tests that will need implementation

---
 test/schema_registry_test.go |  7 +++++++
 test/worker_test.go          | 18 ++++++++++++++++++
 2 files changed, 25 insertions(+)

diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go
index 3b9853b..015183c 100644
--- a/test/schema_registry_test.go
+++ b/test/schema_registry_test.go
@@ -344,6 +344,13 @@ func Test_SchemaRegistry_AutoRegisterSchemas_RequiresSchemaSpecification(t *test
 	require.ErrorContains(t, err, "avro schema is required for schema registry formatter")
 }
 
+func Test_ProtoSchemaRegistry(t *testing.T) {
+	require.Fail(t, "implement")
+}
+
+func Test_JsonSchemaRegistry(t *testing.T) {
+	require.Fail(t, "implement")
+}
 func checkShouldSkipTest(t *testing.T, flags ...string) {
 	t.Helper()
 	for _, flag := range flags {
diff --git a/test/worker_test.go b/test/worker_test.go
index d1334c9..f22584f 100644
--- a/test/worker_test.go
+++ b/test/worker_test.go
@@ -2060,6 +2060,24 @@ func TestWork_ShutdownCausesRunExit(t *testing.T) {
 	require.NoError(t, err)
 }
 
+func Test_DeadletterClientDoesntCollideWithProducer(t *testing.T) {
+	// work with deadletter clientid=123
+	// producer clientid=123
+	// This is a classic pattern, connector style. Writes shouldn't collide.
+	t.Fail()
+}
+
+func Test_MissingBootstrap_ShouldGiveClearError(t *testing.T) {
+	t.Fail()
+}
+
+func Test_FailedAuthentication(t *testing.T) {
+	// Not sure if we can even do this with the local broker.
+	// See if we can require a username/password for the dynamically provisioned topic.
+	t.Fail()
+}
+
 // $ go test -run=XXX -bench=BenchmarkWork_Run_CircuitBreaker_BusyLoopBreaker -cpuprofile profile_cpu.out
 // $ go tool pprof --web profile_cpu.out
 // $ go tool pprof -http=":8000" test.test ./profile_cpu.out

From 355e8b44b8155575e9f0251b3f0daefa364bf8a3 Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Sat, 21 Sep 2024 22:51:11 -0700
Subject: [PATCH 12/34] Added proto schema registry support

---
 Makefile                                 |  15 +-
 client.go                                | 101 +++++--
 client_test.go                           |   6 +-
 coverage.sh                              |   4 +
 example/compose.yaml                     |   4 +-
 example/producer_avro/main.go            |   2 +-
 formatter.go                             |  88 +++---
 go.mod                                   |   7 +-
 go.sum                                   |   5 +
 reader.go                                |   4 -
 reader_test.go                           |   3 +-
 .../avro1/schema_1_gen.go}               |   2 +-
 .../avro2/schema_2_gen.go}               |   2 +-
 test/evolution/proto1/schema_1.pb.go     | 182 ++++++++++++
 test/evolution/proto2/schema_2.pb.go     | 192 +++++++++++++
 .../schema_1.avsc}                       |   0
 test/evolution/schema_1.proto            |  13 +
 .../schema_2.avsc}                       |   0
 test/evolution/schema_2.proto            |  14 +
 test/schema_registry_evo_test.go         | 258 ++++++++++++++++++
 test/schema_registry_test.go             | 172 ++----------
 21 files changed, 841 insertions(+), 233 deletions(-)
 rename test/{heetch1/dummy_event_1_gen.go => evolution/avro1/schema_1_gen.go} (97%)
 rename test/{heetch2/dummy_event_2_gen.go => evolution/avro2/schema_2_gen.go} (98%)
 create mode 100644 test/evolution/proto1/schema_1.pb.go
 create mode 100644 test/evolution/proto2/schema_2.pb.go
 rename test/{dummy_event_1.avsc => evolution/schema_1.avsc} (100%)
 create mode 100644 test/evolution/schema_1.proto
 rename test/{dummy_event_2.avsc => evolution/schema_2.avsc} (100%)
 create mode 100644 test/evolution/schema_2.proto
 create mode 100644 test/schema_registry_evo_test.go

diff --git a/Makefile b/Makefile
index 266b2a3..b6a2494 100644
--- a/Makefile
+++ b/Makefile
@@ -43,10 +43,15 @@ golangci-lint:
 		golangci-lint run --path-prefix $(mod) ./...) &&) true
 
 .PHONY: gen
-gen:
+gen: protoc-exists
+	cd test/evolution; protoc --proto_path=. --go_out=./ ./schema_1.proto
+	cd test/evolution; protoc --proto_path=. --go_out=./ ./schema_2.proto
 	go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p main -d ./example/producer_avro ./example/producer_avro/dummy_event.avsc
 	go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p main -d ./example/worker_avro ./example/worker_avro/dummy_event.avsc
-	go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p heetch1 -d ./test/heetch1 ./test/dummy_event_1.avsc
-	go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p heetch2 -d ./test/heetch2 ./test/dummy_event_2.avsc
-	#go run github.com/hamba/avro/v2/cmd/avrogen@v2.25.1 -pkg main -o ./example/producer_avro/bla.go -tags json:snake,yaml:upper-camel ./example/producer_avro/dummy_event.avsc
-	#go run github.com/hamba/avro/v2/cmd/avrogen@v2.25.1 -pkg main -o ./example/worker_avro/bla.go -tags json:snake,yaml:upper-camel ./example/worker_avro/dummy_event.avsc
+	go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p avro1 -d ./test/evolution/avro1 ./test/evolution/schema_1.avsc
+	go run github.com/heetch/avro/cmd/avrogo@v0.4.5 -p avro2 -d ./test/evolution/avro2 ./test/evolution/schema_2.avsc
+
+# a forced dependency which fails (and prints a hint) if `protoc` isn't installed
+.PHONY: protoc-exists
+protoc-exists:
+	@which protoc > /dev/null || (echo "protoc is not installed. Install via `brew install protobuf`"; exit 1)
\ No newline at end of file
diff --git a/client.go b/client.go
index aac8b40..b71f3b9 100644
--- a/client.go
+++ b/client.go
@@ -11,6 +11,8 @@ import (
 	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
 	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde"
 	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2"
+	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/protobuf"
+
 	"github.com/zillow/zfmt"
 	"go.opentelemetry.io/otel/propagation"
 	"go.opentelemetry.io/otel/trace"
@@ -29,24 +31,18 @@ const instrumentationName = "github.com/zillow/zkafka"
 
 // Client helps instantiate usable readers and writers
 type Client struct {
-	mu           sync.RWMutex
-	conf         Config
-	readers      map[string]Reader
-	writers      map[string]Writer
-	logger       Logger
-	lifecycle    LifecycleHooks
-	groupPrefix  string
-	tp           trace.TracerProvider
-	p            propagation.TextMapPropagator
-	srClProvider srProvider2
-	//writerProvider writerProvider
-	//readerProvider readerProvider
+	mu          sync.RWMutex
+	conf        Config
+	readers     map[string]Reader
+	writers     map[string]Writer
+	logger      Logger
+	lifecycle   LifecycleHooks
+	groupPrefix string
+	tp          trace.TracerProvider
+	p           propagation.TextMapPropagator
 
 	srf *schemaRegistryFactory
-	//mmu sync.Mutex
-	//srCls map[string]schemaregistry.Client
 
-	// confluent dependencies
 	producerProvider confluentProducerProvider
 	consumerProvider confluentConsumerProvider
 }
@@ -62,7 +58,7 @@ func NewClient(conf Config, opts ...Option) *Client {
 		producerProvider: defaultConfluentProducerProvider{}.NewProducer,
 		consumerProvider: defaultConfluentConsumerProvider{}.NewConsumer,
-		srClProvider:     srf.create,
+		srf:              srf,
 	}
 	for _, opt := range opts {
 		opt(c)
@@ -96,11 +92,10 @@ func (c *Client) Reader(_ context.Context, topicConfig ConsumerTopicConfig, opts
 		return r, nil
 	}
 
-	formatter, err := getFormatter(formatterArgs{
+
formatter, err := c.getFormatter(formatterArgs{ formatter: topicConfig.Formatter, schemaID: topicConfig.SchemaID, srCfg: topicConfig.SchemaRegistry, - getSR: c.srClProvider, }) if err != nil { @@ -193,6 +187,36 @@ func (c *Client) Close() error { return err } +func (c *Client) getFormatter(args formatterArgs) (kFormatter, error) { + formatter := args.formatter + schemaID := args.schemaID + + switch formatter { + case AvroSchemaRegistry: + scl, err := c.srf.createAvro(args.srCfg) + if err != nil { + return nil, err + } + cf, err := newAvroSchemaRegistryFormatter(scl) + return cf, err + case ProtoSchemaRegistry: + scl, err := c.srf.createProto(args.srCfg) + if err != nil { + return nil, err + } + cf := newProtoSchemaRegistryFormatter(scl) + return cf, nil + case CustomFmt: + return &errFormatter{}, nil + default: + f, err := zfmt.GetFormatter(formatter, schemaID) + if err != nil { + return nil, fmt.Errorf("unsupported formatter %s", formatter) + } + return zfmtShim{F: f}, nil + } +} + type schemaRegistryFactory struct { mmu sync.Mutex srCls map[string]schemaregistry.Client @@ -204,16 +228,16 @@ func newSchemaRegistryFactory() *schemaRegistryFactory { } } -func (c *schemaRegistryFactory) create(srConfig SchemaRegistryConfig) (schemaRegistryCl, error) { +func (c *schemaRegistryFactory) createAvro(srConfig SchemaRegistryConfig) (avroFmt, error) { cl, err := c.getSchemaClient(srConfig) if err != nil { - return nil, err + return avroFmt{}, err } deserConfig := avrov2.NewDeserializerConfig() deser, err := avrov2.NewDeserializer(cl, serde.ValueSerde, deserConfig) if err != nil { - return shim{}, fmt.Errorf("failed to create deserializer: %w", err) + return avroFmt{}, fmt.Errorf("failed to create deserializer: %w", err) } serConfig := avrov2.NewSerializerConfig() @@ -222,12 +246,39 @@ func (c *schemaRegistryFactory) create(srConfig SchemaRegistryConfig) (schemaReg ser, err := avrov2.NewSerializer(cl, serde.ValueSerde, serConfig) if err != nil { - return shim{}, fmt.Errorf("failed to create serializer: %w", err) + return avroFmt{}, fmt.Errorf("failed to create serializer: %w", err) + } + return avroFmt{ + ser: ser, + deser: deser, + }, nil +} + +func (c *schemaRegistryFactory) createProto(srConfig SchemaRegistryConfig) (protoFmt, error) { + cl, err := c.getSchemaClient(srConfig) + if err != nil { + return protoFmt{}, err } - return shim{ + + deserConfig := protobuf.NewDeserializerConfig() + deser, err := protobuf.NewDeserializer(cl, serde.ValueSerde, deserConfig) + if err != nil { + return protoFmt{}, fmt.Errorf("failed to create deserializer: %w", err) + } + + serConfig := protobuf.NewSerializerConfig() + serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas + serConfig.NormalizeSchemas = true + + ser, err := protobuf.NewSerializer(cl, serde.ValueSerde, serConfig) + if err != nil { + return protoFmt{}, fmt.Errorf("failed to create serializer: %w", err) + } + return protoFmt{ ser: ser, deser: deser, }, nil + } func (c *schemaRegistryFactory) getSchemaClient(srConfig SchemaRegistryConfig) (schemaregistry.Client, error) { diff --git a/client_test.go b/client_test.go index a03a482..3ea4ed4 100644 --- a/client_test.go +++ b/client_test.go @@ -608,7 +608,8 @@ func Test_getFormatter_Consumer(t *testing.T) { formatter: tt.args.topicConfig.Formatter, schemaID: tt.args.topicConfig.SchemaID, } - got, err := getFormatter(args) + c := Client{} + got, err := c.getFormatter(args) if tt.wantErr { require.Error(t, err) } else { @@ -649,7 +650,8 @@ func Test_getFormatter_Producer(t *testing.T) { 
formatter: tt.args.topicConfig.Formatter, schemaID: tt.args.topicConfig.SchemaID, } - got, err := getFormatter(args) + c := Client{} + got, err := c.getFormatter(args) if tt.wantErr { require.Error(t, err) } else { diff --git a/coverage.sh b/coverage.sh index 9948763..b1a748e 100755 --- a/coverage.sh +++ b/coverage.sh @@ -1,6 +1,10 @@ #!/usr/bin/env bash set -x +# Protobuf schema registry schema evolution tests register the same message type +# in the same package. These are compiled in the same binary (the test), and by default +# proto panics in such a situation. Setting this envvar ignores that check +export GOLANG_PROTOBUF_REGISTRATION_CONFLICT=ignore # golang packages that will be used for either testing or will be assessed for coverage pck1=github.com/zillow/zkafka pck2=$pck1/test diff --git a/example/compose.yaml b/example/compose.yaml index 52d7b61..25505ec 100644 --- a/example/compose.yaml +++ b/example/compose.yaml @@ -23,7 +23,7 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 schemaregistry: - image: confluentinc/cp-schema-registry:5.3.2 + image: confluentinc/cp-schema-registry:7.7.1 depends_on: - kafka - zookeeper @@ -34,4 +34,6 @@ services: SCHEMA_REGISTRY_HOST_NAME: schemaregistry SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081 SCHEMA_REGISTRY_DEBUG: true + SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092 + diff --git a/example/producer_avro/main.go b/example/producer_avro/main.go index 4648f1d..b804160 100644 --- a/example/producer_avro/main.go +++ b/example/producer_avro/main.go @@ -20,7 +20,7 @@ func main() { }).Writer(ctx, zkafka.ProducerTopicConfig{ ClientID: "example", Topic: "zkafka-example-topic", - Formatter: zkafka.AvroConfluentFmt, + Formatter: zkafka.AvroSchemaRegistry, SchemaRegistry: zkafka.SchemaRegistryConfig{ URL: "http://localhost:8081", Serialization: struct{ AutoRegisterSchemas bool }{ diff --git a/formatter.go b/formatter.go index 4ee1c5d..f528a87 100644 --- a/formatter.go +++ b/formatter.go @@ -6,13 +6,18 @@ import ( "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/protobuf" "github.com/zillow/zfmt" + //"k8s.io/apimachinery/pkg/runtime/serializer/protobuf" ) const ( // CustomFmt indicates that the user would pass in their own Formatter later - CustomFmt zfmt.FormatterType = "custom" - AvroConfluentFmt zfmt.FormatterType = "avro_confluent" + CustomFmt zfmt.FormatterType = "custom" + // AvroSchemaRegistry uses confluent's schema registry. It encodes a schemaID as the first 5 bytes and then avro serializes (binary) + // for the remaining part of the payload. 
It is the successor to `avro_schema` which ships with zfmt, + AvroSchemaRegistry zfmt.FormatterType = "avro_schema_registry" + ProtoSchemaRegistry zfmt.FormatterType = "proto_schema_registry" ) var errMissingFmtter = errors.New("custom formatter is missing, did you forget to call WithFormatter()") @@ -61,56 +66,30 @@ type formatterArgs struct { formatter zfmt.FormatterType schemaID int srCfg SchemaRegistryConfig - getSR srProvider2 -} - -func getFormatter(args formatterArgs) (kFormatter, error) { - formatter := args.formatter - schemaID := args.schemaID - - switch formatter { - case AvroConfluentFmt: - srCfg := args.srCfg - getSR := args.getSR - scl, err := getSR(srCfg) - if err != nil { - return nil, err - } - cf, err := NewAvroSchemaRegistryFormatter(scl) - return cf, err - case CustomFmt: - return &errFormatter{}, nil - default: - f, err := zfmt.GetFormatter(formatter, schemaID) - if err != nil { - return nil, fmt.Errorf("unsupported formatter %s", formatter) - } - return zfmtShim{F: f}, nil - } } // errFormatter is a formatter that returns error when called. The error will remind the user // to provide appropriate implementation type errFormatter struct{} -// Marshall returns error with reminder +// marshall returns error with reminder func (f errFormatter) marshall(req marshReq) ([]byte, error) { return nil, errMissingFmtter } -// Unmarshal returns error with reminder +// unmarshal returns error with reminder func (f errFormatter) unmarshal(req unmarshReq) error { return errMissingFmtter } type avroSchemaRegistryFormatter struct { - schemaRegistryCl schemaRegistryCl - f zfmt.SchematizedAvroFormatter + afmt avroFmt + f zfmt.SchematizedAvroFormatter } -func NewAvroSchemaRegistryFormatter(shimCl schemaRegistryCl) (avroSchemaRegistryFormatter, error) { +func newAvroSchemaRegistryFormatter(afmt avroFmt) (avroSchemaRegistryFormatter, error) { return avroSchemaRegistryFormatter{ - schemaRegistryCl: shimCl, + afmt: afmt, }, nil } @@ -118,7 +97,7 @@ func (f avroSchemaRegistryFormatter) marshall(req marshReq) ([]byte, error) { if req.schema == "" { return nil, errors.New("avro schema is required for schema registry formatter") } - id, err := f.schemaRegistryCl.GetID(req.topic, req.schema) + id, err := f.afmt.GetID(req.topic, req.schema) if err != nil { return nil, fmt.Errorf("failed to get avro schema by id for topic %s: %w", req.topic, err) } @@ -131,29 +110,52 @@ func (f avroSchemaRegistryFormatter) marshall(req marshReq) ([]byte, error) { } func (f avroSchemaRegistryFormatter) unmarshal(req unmarshReq) error { - err := f.schemaRegistryCl.Deserialize(req.topic, req.data, &req.target) + err := f.afmt.Deserialize(req.topic, req.data, &req.target) if err != nil { return fmt.Errorf("failed to deserialize to confluent schema registry avro type: %w", err) } return nil } -type schemaRegistryCl interface { - GetID(topic string, avroSchema string) (int, error) - Deserialize(topic string, value []byte, target any) error +type protoSchemaRegistryFormatter struct { + pfmt protoFmt +} + +func newProtoSchemaRegistryFormatter(pfmt protoFmt) protoSchemaRegistryFormatter { + return protoSchemaRegistryFormatter{ + pfmt: pfmt, + } } -var _ schemaRegistryCl = (*shim)(nil) +func (f protoSchemaRegistryFormatter) marshall(req marshReq) ([]byte, error) { + msgBytes, err := f.pfmt.ser.Serialize(req.topic, req.subject) + if err != nil { + return nil, fmt.Errorf("failed to proto serialize: %w", err) + } + return msgBytes, nil +} -type shim struct { +func (f protoSchemaRegistryFormatter) unmarshal(req unmarshReq) error 
{ + if err := f.pfmt.deser.DeserializeInto(req.topic, req.data, req.target); err != nil { + return fmt.Errorf("failed to proto deserialize: %w", err) + } + return nil +} + +type avroFmt struct { ser *avrov2.Serializer deser *avrov2.Deserializer } -func (s shim) GetID(topic string, avroSchema string) (int, error) { +func (s avroFmt) GetID(topic string, avroSchema string) (int, error) { return s.ser.GetID(topic, nil, &schemaregistry.SchemaInfo{Schema: avroSchema}) } -func (s shim) Deserialize(topic string, value []byte, target any) error { +func (s avroFmt) Deserialize(topic string, value []byte, target any) error { return s.deser.DeserializeInto(topic, value, target) } + +type protoFmt struct { + ser *protobuf.Serializer + deser *protobuf.Deserializer +} diff --git a/go.mod b/go.mod index d5a9ec6..16ad96f 100644 --- a/go.mod +++ b/go.mod @@ -7,28 +7,31 @@ require ( github.com/golang/mock v1.6.0 github.com/google/go-cmp v0.6.0 github.com/google/uuid v1.6.0 + github.com/heetch/avro v0.4.5 github.com/sony/gobreaker v1.0.0 github.com/stretchr/testify v1.9.0 github.com/zillow/zfmt v1.0.1 go.opentelemetry.io/otel v1.28.0 go.opentelemetry.io/otel/trace v1.28.0 golang.org/x/sync v0.7.0 + google.golang.org/protobuf v1.34.2 ) require ( github.com/actgardner/gogen-avro/v10 v10.2.1 // indirect + github.com/bufbuild/protocompile v0.8.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/hamba/avro/v2 v2.20.1 // indirect - github.com/heetch/avro v0.4.5 // indirect + github.com/jhump/protoreflect v1.15.6 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect go.opentelemetry.io/otel/metric v1.28.0 // indirect - google.golang.org/protobuf v1.34.2 // indirect + google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 1e0f77a..d13b414 100644 --- a/go.sum +++ b/go.sum @@ -1,3 +1,4 @@ +cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM= cloud.google.com/go/compute v1.25.1 h1:ZRpHJedLtTpKgr3RV1Fx23NuaAEN1Zfx9hw1u4aJdjU= cloud.google.com/go/compute v1.25.1/go.mod h1:oopOIR53ly6viBYxaDhBfJwzUAxf1zE//uf3IB011ls= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= @@ -64,6 +65,8 @@ github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bufbuild/protocompile v0.8.0 h1:9Kp1q6OkS9L4nM3FYbr8vlJnEwtbpDPQlQOVXfR+78s= +github.com/bufbuild/protocompile v0.8.0/go.mod h1:+Etjg4guZoAqzVk2czwEQP12yaxLJ8DxuqCJ9qHdH94= github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY= github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE= github.com/cenkalti/backoff/v3 v3.0.0 h1:ske+9nBpD9qZsTBoF41nW5L+AIuFBKMeze18XQ3eG1c= @@ -219,6 +222,8 @@ github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF github.com/in-toto/in-toto-golang v0.5.0/go.mod 
h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jhump/protoreflect v1.15.6 h1:WMYJbw2Wo+KOWwZFvgY0jMoVHM6i4XIvRs2RcBj5VmI= +github.com/jhump/protoreflect v1.15.6/go.mod h1:jCHoyYQIJnaabEYnbGwyo9hUqfyUMTbJw/tAut5t97E= github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= diff --git a/reader.go b/reader.go index dcf9e7b..f2197e8 100644 --- a/reader.go +++ b/reader.go @@ -51,8 +51,6 @@ type KReader struct { tCommitMgr *topicCommitMgr } -type srProvider2 func(_ SchemaRegistryConfig) (schemaRegistryCl, error) - type readerArgs struct { cfg Config cCfg ConsumerTopicConfig @@ -64,8 +62,6 @@ type readerArgs struct { opts []ReaderOption } -//type readerProvider func(args readerArgs) (Reader, error) - // newReader makes a new reader based on the configurations func newReader(args readerArgs) (*KReader, error) { conf := args.cfg diff --git a/reader_test.go b/reader_test.go index a1aa7a7..9cbe688 100644 --- a/reader_test.go +++ b/reader_test.go @@ -66,7 +66,8 @@ func TestReader_Read(t *testing.T) { m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer - f, err := getFormatter(formatterArgs{formatter: topicConfig.Formatter}) + c := Client{} + f, err := c.getFormatter(formatterArgs{formatter: topicConfig.Formatter}) require.NoError(t, err) args := readerArgs{ diff --git a/test/heetch1/dummy_event_1_gen.go b/test/evolution/avro1/schema_1_gen.go similarity index 97% rename from test/heetch1/dummy_event_1_gen.go rename to test/evolution/avro1/schema_1_gen.go index c741965..832022c 100644 --- a/test/heetch1/dummy_event_1_gen.go +++ b/test/evolution/avro1/schema_1_gen.go @@ -1,6 +1,6 @@ // Code generated by avrogen. DO NOT EDIT. -package heetch1 +package avro1 import ( "github.com/heetch/avro/avrotypegen" diff --git a/test/heetch2/dummy_event_2_gen.go b/test/evolution/avro2/schema_2_gen.go similarity index 98% rename from test/heetch2/dummy_event_2_gen.go rename to test/evolution/avro2/schema_2_gen.go index 509bad9..13d289c 100644 --- a/test/heetch2/dummy_event_2_gen.go +++ b/test/evolution/avro2/schema_2_gen.go @@ -1,6 +1,6 @@ // Code generated by avrogen. DO NOT EDIT. -package heetch2 +package avro2 import ( "github.com/heetch/avro/avrotypegen" diff --git a/test/evolution/proto1/schema_1.pb.go b/test/evolution/proto1/schema_1.pb.go new file mode 100644 index 0000000..4538169 --- /dev/null +++ b/test/evolution/proto1/schema_1.pb.go @@ -0,0 +1,182 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.31.0 +// protoc v5.27.0 +// source: schema_1.proto + +package proto1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type DummyEvent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + IntField int64 `protobuf:"varint,1,opt,name=IntField,proto3" json:"IntField,omitempty"` + DoubleField float32 `protobuf:"fixed32,2,opt,name=DoubleField,proto3" json:"DoubleField,omitempty"` + StringField string `protobuf:"bytes,3,opt,name=StringField,proto3" json:"StringField,omitempty"` + BoolField bool `protobuf:"varint,4,opt,name=BoolField,proto3" json:"BoolField,omitempty"` + BytesField []byte `protobuf:"bytes,5,opt,name=BytesField,proto3" json:"BytesField,omitempty"` +} + +func (x *DummyEvent) Reset() { + *x = DummyEvent{} + if protoimpl.UnsafeEnabled { + mi := &file_schema_1_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DummyEvent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DummyEvent) ProtoMessage() {} + +func (x *DummyEvent) ProtoReflect() protoreflect.Message { + mi := &file_schema_1_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DummyEvent.ProtoReflect.Descriptor instead. +func (*DummyEvent) Descriptor() ([]byte, []int) { + return file_schema_1_proto_rawDescGZIP(), []int{0} +} + +func (x *DummyEvent) GetIntField() int64 { + if x != nil { + return x.IntField + } + return 0 +} + +func (x *DummyEvent) GetDoubleField() float32 { + if x != nil { + return x.DoubleField + } + return 0 +} + +func (x *DummyEvent) GetStringField() string { + if x != nil { + return x.StringField + } + return "" +} + +func (x *DummyEvent) GetBoolField() bool { + if x != nil { + return x.BoolField + } + return false +} + +func (x *DummyEvent) GetBytesField() []byte { + if x != nil { + return x.BytesField + } + return nil +} + +var File_schema_1_proto protoreflect.FileDescriptor + +var file_schema_1_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x09, 0x65, 0x76, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xaa, 0x01, 0x0a, 0x0a, + 0x44, 0x75, 0x6d, 0x6d, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0b, 0x44, 0x6f, 0x75, + 0x62, 0x6c, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x74, 0x72, 0x69, + 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x53, + 0x74, 0x72, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x42, 0x6f, + 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x42, + 0x6f, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x42, 0x79, 0x74, 0x65, + 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x42, 0x79, + 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x42, 0x09, 0x5a, 0x07, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x31, 0x2f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_schema_1_proto_rawDescOnce sync.Once + 
file_schema_1_proto_rawDescData = file_schema_1_proto_rawDesc +) + +func file_schema_1_proto_rawDescGZIP() []byte { + file_schema_1_proto_rawDescOnce.Do(func() { + file_schema_1_proto_rawDescData = protoimpl.X.CompressGZIP(file_schema_1_proto_rawDescData) + }) + return file_schema_1_proto_rawDescData +} + +var file_schema_1_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_schema_1_proto_goTypes = []interface{}{ + (*DummyEvent)(nil), // 0: evolution.DummyEvent +} +var file_schema_1_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_schema_1_proto_init() } +func file_schema_1_proto_init() { + if File_schema_1_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_schema_1_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DummyEvent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_schema_1_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_schema_1_proto_goTypes, + DependencyIndexes: file_schema_1_proto_depIdxs, + MessageInfos: file_schema_1_proto_msgTypes, + }.Build() + File_schema_1_proto = out.File + file_schema_1_proto_rawDesc = nil + file_schema_1_proto_goTypes = nil + file_schema_1_proto_depIdxs = nil +} diff --git a/test/evolution/proto2/schema_2.pb.go b/test/evolution/proto2/schema_2.pb.go new file mode 100644 index 0000000..01156d5 --- /dev/null +++ b/test/evolution/proto2/schema_2.pb.go @@ -0,0 +1,192 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.31.0 +// protoc v5.27.0 +// source: schema_2.proto + +package proto2 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type DummyEvent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + IntField int64 `protobuf:"varint,1,opt,name=IntField,proto3" json:"IntField,omitempty"` + DoubleField float32 `protobuf:"fixed32,2,opt,name=DoubleField,proto3" json:"DoubleField,omitempty"` + StringField string `protobuf:"bytes,3,opt,name=StringField,proto3" json:"StringField,omitempty"` + BoolField bool `protobuf:"varint,4,opt,name=BoolField,proto3" json:"BoolField,omitempty"` + BytesField []byte `protobuf:"bytes,5,opt,name=BytesField,proto3" json:"BytesField,omitempty"` + NewField string `protobuf:"bytes,6,opt,name=NewField,proto3" json:"NewField,omitempty"` +} + +func (x *DummyEvent) Reset() { + *x = DummyEvent{} + if protoimpl.UnsafeEnabled { + mi := &file_schema_2_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DummyEvent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DummyEvent) ProtoMessage() {} + +func (x *DummyEvent) ProtoReflect() protoreflect.Message { + mi := &file_schema_2_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DummyEvent.ProtoReflect.Descriptor instead. +func (*DummyEvent) Descriptor() ([]byte, []int) { + return file_schema_2_proto_rawDescGZIP(), []int{0} +} + +func (x *DummyEvent) GetIntField() int64 { + if x != nil { + return x.IntField + } + return 0 +} + +func (x *DummyEvent) GetDoubleField() float32 { + if x != nil { + return x.DoubleField + } + return 0 +} + +func (x *DummyEvent) GetStringField() string { + if x != nil { + return x.StringField + } + return "" +} + +func (x *DummyEvent) GetBoolField() bool { + if x != nil { + return x.BoolField + } + return false +} + +func (x *DummyEvent) GetBytesField() []byte { + if x != nil { + return x.BytesField + } + return nil +} + +func (x *DummyEvent) GetNewField() string { + if x != nil { + return x.NewField + } + return "" +} + +var File_schema_2_proto protoreflect.FileDescriptor + +var file_schema_2_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x32, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x09, 0x65, 0x76, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xc6, 0x01, 0x0a, 0x0a, + 0x44, 0x75, 0x6d, 0x6d, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x44, 0x6f, 0x75, 0x62, 0x6c, 0x65, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0b, 0x44, 0x6f, 0x75, + 0x62, 0x6c, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x53, 0x74, 0x72, 0x69, + 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x53, + 0x74, 0x72, 0x69, 0x6e, 0x67, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x42, 0x6f, + 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x42, + 0x6f, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x42, 0x79, 0x74, 0x65, + 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x42, 0x79, + 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 
0x1a, 0x0a, 0x08, 0x4e, 0x65, 0x77, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x4e, 0x65, 0x77, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x42, 0x09, 0x5a, 0x07, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x32, 0x2f, 0x62, + 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_schema_2_proto_rawDescOnce sync.Once + file_schema_2_proto_rawDescData = file_schema_2_proto_rawDesc +) + +func file_schema_2_proto_rawDescGZIP() []byte { + file_schema_2_proto_rawDescOnce.Do(func() { + file_schema_2_proto_rawDescData = protoimpl.X.CompressGZIP(file_schema_2_proto_rawDescData) + }) + return file_schema_2_proto_rawDescData +} + +var file_schema_2_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_schema_2_proto_goTypes = []interface{}{ + (*DummyEvent)(nil), // 0: evolution.DummyEvent +} +var file_schema_2_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_schema_2_proto_init() } +func file_schema_2_proto_init() { + if File_schema_2_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_schema_2_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DummyEvent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_schema_2_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_schema_2_proto_goTypes, + DependencyIndexes: file_schema_2_proto_depIdxs, + MessageInfos: file_schema_2_proto_msgTypes, + }.Build() + File_schema_2_proto = out.File + file_schema_2_proto_rawDesc = nil + file_schema_2_proto_goTypes = nil + file_schema_2_proto_depIdxs = nil +} diff --git a/test/dummy_event_1.avsc b/test/evolution/schema_1.avsc similarity index 100% rename from test/dummy_event_1.avsc rename to test/evolution/schema_1.avsc diff --git a/test/evolution/schema_1.proto b/test/evolution/schema_1.proto new file mode 100644 index 0000000..46f90aa --- /dev/null +++ b/test/evolution/schema_1.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package evolution; + +option go_package = "proto1/"; + +message DummyEvent { + int64 IntField = 1; + float DoubleField = 2; + string StringField = 3; + bool BoolField = 4; + bytes BytesField = 5; +} \ No newline at end of file diff --git a/test/dummy_event_2.avsc b/test/evolution/schema_2.avsc similarity index 100% rename from test/dummy_event_2.avsc rename to test/evolution/schema_2.avsc diff --git a/test/evolution/schema_2.proto b/test/evolution/schema_2.proto new file mode 100644 index 0000000..810d6c8 --- /dev/null +++ b/test/evolution/schema_2.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package evolution; + +option go_package = "proto2/"; + +message DummyEvent { + int64 IntField = 1; + float DoubleField = 2; + string StringField = 3; + bool BoolField = 4; + bytes BytesField = 5; + string NewField = 6; +} \ No newline at end of file diff --git a/test/schema_registry_evo_test.go b/test/schema_registry_evo_test.go new file mode 100644 index 0000000..b99e72a --- /dev/null +++ b/test/schema_registry_evo_test.go @@ -0,0 +1,258 @@ +//go:build evolution_test +// +build 
evolution_test + +package test + +import ( + "context" + _ "embed" + "fmt" + "math/rand" + "testing" + + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "github.com/zillow/zkafka" + "github.com/zillow/zkafka/test/evolution/avro1" + "github.com/zillow/zkafka/test/evolution/avro2" + "github.com/zillow/zkafka/test/evolution/proto1" + "github.com/zillow/zkafka/test/evolution/proto2" +) + +// Test_SchemaRegistryReal_Avro_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom tests +// the `avro_schema_registry` formatter which uses schema registry. +// +// Two schemas exist, which are backwards compatible with one another. +// The default behavior of confluent-kafka-go doesn't handle this well: the new field in schema2, +// which has a default value and is nullable, has its default omitted in schema registration and is therefore +// found to be incompatible. Auto registration isn't typically used in production environments, +// but this behavior is still problematic because the implicit schema resolution is used to communicate with schema registry +// and determine the appropriate schemaID to embed. The omitted default means the correct schema isn't found. +// +// The two messages successfully writing means the two schemas are registered. +// We then test that we can use the confluent deserializer to decode the messages, for both schema1 and schema2. +// This confirms that backwards/forward compatible evolution is possible and old schemas can still read messages from new. +func Test_SchemaRegistryReal_Avro_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema1, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema2, + }, + }, + }) + require.NoError(t, err) + + evt1 := avro1.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1 and msg2 + _, err = writer1.Write(ctx, evt1) + require.NoError(t, err) + + evt2 := avro2.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()),
NewFieldWithDefault: ptr(uuid.NewString()), + } + _, err = writer2.Write(ctx, evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := avro1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1) + + receivedEvt2Schema1 := avro1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := avro1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1) + + receivedEvt2Schema2 := avro2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2) +} + +func Test_SchemaRegistryReal_Proto_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + //Schema: dummyEventSchema1, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + //Schema: dummyEventSchema2, + }, + }, + }) + require.NoError(t, err) + + evt1 := proto1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + evt2 := proto2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: uuid.NewString(), + } + _, err = writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), 
uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := proto1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema1 := proto1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := proto1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema2 := proto2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(proto2.DummyEvent{})) +} diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go index 015183c..5b29f14 100644 --- a/test/schema_registry_test.go +++ b/test/schema_registry_test.go @@ -11,147 +11,21 @@ import ( "github.com/google/uuid" "github.com/stretchr/testify/require" "github.com/zillow/zkafka" - "github.com/zillow/zkafka/test/heetch1" - "github.com/zillow/zkafka/test/heetch2" + "github.com/zillow/zkafka/test/evolution/avro1" + "github.com/zillow/zkafka/test/evolution/avro2" ) -//go:embed dummy_event_1.avsc +//go:embed evolution/schema_1.avsc var dummyEventSchema1 string -//go:embed dummy_event_2.avsc +//go:embed evolution/schema_2.avsc var dummyEventSchema2 string const enableSchemaRegistryTest = "ENABLE_SCHEMA_REGISTRY_TESTS" const enableKafkaBrokerTest = "ENABLE_KAFKA_BROKER_TESTS" -// Test_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom tests -// the `avro_confluent` formatter which uses schema registry. -// -// two schemas exists, which are backwards compatible with one another. -// The default behavior of the confluent-kafka-go doesn't handle this well, since the new field in schema2 -// -// which has a default value and is nullable has the default value omitted in schema registration and is therefore -// -// found to be incompatible. Auto registration isn't typically used in production environments, -// but this behavior is still problematic because the implicit schema resolution is used to communicate with schema registry -// and determine the appropriate schemaID to embed. The omitted default means the correct schema isn't found. -// -// The two message successfully writing means the two schemas are registered. -// We then test we can use the confluent deserializer to decode the messages. For both schema1 and schema2. -// This confirms that backwards/forward compatible evolution is possible and old schemas can still read messages from new. 
-func Test_SchemaRegistryReal_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { - checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) - - ctx := context.Background() - topic := "integration-test-topic-2" + uuid.NewString() - bootstrapServer := getBootstrap() - - createTopic(t, bootstrapServer, topic, 1) - t.Logf("Created topic: %s", topic) - - groupID := uuid.NewString() - - client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) - defer func() { require.NoError(t, client.Close()) }() - - t.Log("Created writer with auto registered schemas") - writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ - ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.AvroConfluentFmt, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "http://localhost:8081", - Serialization: zkafka.SerializationConfig{ - AutoRegisterSchemas: true, - Schema: dummyEventSchema1, - }, - }, - }) - require.NoError(t, err) - - writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ - ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.AvroConfluentFmt, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "http://localhost:8081", - Serialization: zkafka.SerializationConfig{ - AutoRegisterSchemas: true, - Schema: dummyEventSchema2, - }, - }, - }) - require.NoError(t, err) - - evt1 := heetch1.DummyEvent{ - IntField: int(rand.Int31()), - DoubleField: rand.Float64(), - StringField: uuid.NewString(), - BoolField: true, - BytesField: []byte(uuid.NewString()), - } - // write msg1, and msg2 - _, err = writer1.Write(ctx, evt1) - require.NoError(t, err) - - evt2 := heetch2.DummyEvent{ - IntField: int(rand.Int31()), - DoubleField: rand.Float64(), - StringField: uuid.NewString(), - BoolField: true, - BytesField: []byte(uuid.NewString()), - NewFieldWithDefault: ptr(uuid.NewString()), - } - _, err = writer2.Write(ctx, evt2) - require.NoError(t, err) - - consumerTopicConfig := zkafka.ConsumerTopicConfig{ - ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.AvroConfluentFmt, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "http://localhost:8081", - }, - GroupID: groupID, - AdditionalProps: map[string]any{ - "auto.offset.reset": "earliest", - }, - } - reader, err := client.Reader(ctx, consumerTopicConfig) - require.NoError(t, err) - - t.Log("Begin reading messages") - results, err := readMessages(reader, 2) - require.NoError(t, err) - - msg1 := <-results - msg2 := <-results - t.Log("Close reader") - - require.NoError(t, reader.Close()) - - receivedEvt1 := heetch1.DummyEvent{} - require.NoError(t, msg1.Decode(&receivedEvt1)) - assertEqual(t, evt1, receivedEvt1) - - receivedEvt2Schema1 := heetch1.DummyEvent{} - require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) - expectedEvt2 := heetch1.DummyEvent{ - IntField: evt2.IntField, - DoubleField: evt2.DoubleField, - StringField: evt2.StringField, - BoolField: evt2.BoolField, - BytesField: evt2.BytesField, - } - assertEqual(t, expectedEvt2, receivedEvt2Schema1) - - receivedEvt2Schema2 := heetch2.DummyEvent{} - require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) - assertEqual(t, evt2, receivedEvt2Schema2) -} - func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { - checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) + 
checkShouldSkipTest(t, enableKafkaBrokerTest) ctx := context.Background() topic := "integration-test-topic-2" + uuid.NewString() @@ -169,7 +43,7 @@ func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegis writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), Topic: topic, - Formatter: zkafka.AvroConfluentFmt, + Formatter: zkafka.AvroSchemaRegistry, SchemaRegistry: zkafka.SchemaRegistryConfig{ URL: "mock://", Serialization: zkafka.SerializationConfig{ @@ -183,7 +57,7 @@ func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegis writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), Topic: topic, - Formatter: zkafka.AvroConfluentFmt, + Formatter: zkafka.AvroSchemaRegistry, SchemaRegistry: zkafka.SchemaRegistryConfig{ URL: "mock://", Serialization: zkafka.SerializationConfig{ @@ -194,7 +68,7 @@ func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegis }) require.NoError(t, err) - evt1 := heetch1.DummyEvent{ + evt1 := avro1.DummyEvent{ IntField: int(rand.Int31()), DoubleField: rand.Float64(), StringField: uuid.NewString(), @@ -205,7 +79,7 @@ func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegis _, err = writer1.Write(ctx, evt1) require.NoError(t, err) - evt2 := heetch2.DummyEvent{ + evt2 := avro2.DummyEvent{ IntField: int(rand.Int31()), DoubleField: rand.Float64(), StringField: uuid.NewString(), @@ -219,7 +93,7 @@ func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegis consumerTopicConfig := zkafka.ConsumerTopicConfig{ ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), Topic: topic, - Formatter: zkafka.AvroConfluentFmt, + Formatter: zkafka.AvroSchemaRegistry, SchemaRegistry: zkafka.SchemaRegistryConfig{ URL: "mock://", }, @@ -241,13 +115,13 @@ func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegis require.NoError(t, reader.Close()) - receivedEvt1 := heetch1.DummyEvent{} + receivedEvt1 := avro1.DummyEvent{} require.NoError(t, msg1.Decode(&receivedEvt1)) assertEqual(t, evt1, receivedEvt1) - receivedEvt2Schema1 := heetch1.DummyEvent{} + receivedEvt2Schema1 := avro1.DummyEvent{} require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) - expectedEvt2 := heetch1.DummyEvent{ + expectedEvt2 := avro1.DummyEvent{ IntField: evt2.IntField, DoubleField: evt2.DoubleField, StringField: evt2.StringField, @@ -256,7 +130,7 @@ func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegis } assertEqual(t, expectedEvt2, receivedEvt2Schema1) - receivedEvt2Schema2 := heetch2.DummyEvent{} + receivedEvt2Schema2 := avro2.DummyEvent{} require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) assertEqual(t, evt2, receivedEvt2Schema2) } @@ -278,7 +152,7 @@ func Test_SchemaRegistry_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), Topic: topic, - Formatter: zkafka.AvroConfluentFmt, + Formatter: zkafka.AvroSchemaRegistry, SchemaRegistry: zkafka.SchemaRegistryConfig{ URL: "mock://", Serialization: zkafka.SerializationConfig{ @@ -289,7 +163,7 @@ func Test_SchemaRegistry_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing }) require.NoError(t, err) - evt1 := heetch1.DummyEvent{ + evt1 := avro1.DummyEvent{ IntField: int(rand.Int31()), DoubleField: rand.Float64(), 
StringField: uuid.NewString(), @@ -303,7 +177,7 @@ func Test_SchemaRegistry_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing // It's possible to not specify a schema for your producer. // In this case, the underlying lib does -func Test_SchemaRegistry_AutoRegisterSchemas_RequiresSchemaSpecification(t *testing.T) { +func Test_SchemaRegistry_Avro_AutoRegisterSchemas_RequiresSchemaSpecification(t *testing.T) { checkShouldSkipTest(t, enableKafkaBrokerTest) ctx := context.Background() @@ -320,7 +194,7 @@ func Test_SchemaRegistry_AutoRegisterSchemas_RequiresSchemaSpecification(t *test writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), Topic: topic, - Formatter: zkafka.AvroConfluentFmt, + Formatter: zkafka.AvroSchemaRegistry, SchemaRegistry: zkafka.SchemaRegistryConfig{ URL: "mock://", Serialization: zkafka.SerializationConfig{ @@ -332,7 +206,7 @@ func Test_SchemaRegistry_AutoRegisterSchemas_RequiresSchemaSpecification(t *test }) require.NoError(t, err) - evt1 := heetch1.DummyEvent{ + evt1 := avro1.DummyEvent{ IntField: int(rand.Int31()), DoubleField: rand.Float64(), StringField: uuid.NewString(), @@ -344,13 +218,17 @@ func Test_SchemaRegistry_AutoRegisterSchemas_RequiresSchemaSpecification(t *test require.ErrorContains(t, err, "avro schema is required for schema registry formatter") } -func Test_ProtoSchemaRegistry(t *testing.T) { +func Test_SchemaNotRegistered_ImpactToWorker(t *testing.T) { require.Fail(t, "implement") } - func Test_JsonSchemaRegistry(t *testing.T) { require.Fail(t, "implement") } + +func Test_AlternateSubjectNamingStrategy(t *testing.T) { + require.Fail(t, "implement") +} + func checkShouldSkipTest(t *testing.T, flags ...string) { t.Helper() for _, flag := range flags { From 98b801485e952dbab677675e0069b7249331b7eb Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sat, 21 Sep 2024 22:53:59 -0700 Subject: [PATCH 13/34] Updated --- client.go | 23 ++++++++--------------- 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/client.go b/client.go index b71f3b9..4ec02a4 100644 --- a/client.go +++ b/client.go @@ -33,8 +33,8 @@ const instrumentationName = "github.com/zillow/zkafka" type Client struct { mu sync.RWMutex conf Config - readers map[string]Reader - writers map[string]Writer + readers map[string]*KReader + writers map[string]*KWriter logger Logger lifecycle LifecycleHooks groupPrefix string @@ -52,8 +52,8 @@ func NewClient(conf Config, opts ...Option) *Client { srf := newSchemaRegistryFactory() c := &Client{ conf: conf, - readers: make(map[string]Reader), - writers: make(map[string]Writer), + readers: make(map[string]*KReader), + writers: make(map[string]*KWriter), logger: NoopLogger{}, producerProvider: defaultConfluentProducerProvider{}.NewProducer, @@ -74,12 +74,7 @@ func (c *Client) Reader(_ context.Context, topicConfig ConsumerTopicConfig, opts } c.mu.RLock() r, exist := c.readers[topicConfig.ClientID] - kr, ok := r.(*KReader) - // is kr -> isClosed = true -> true - // is kr -> isClosed = false -> false - // is not kr -> false - isClosed := ok && kr.isClosed - if exist && !isClosed { + if exist && !r.isClosed { c.mu.RUnlock() return r, nil } @@ -88,7 +83,7 @@ func (c *Client) Reader(_ context.Context, topicConfig ConsumerTopicConfig, opts c.mu.Lock() defer c.mu.Unlock() r, exist = c.readers[topicConfig.ClientID] - if exist && !isClosed { + if exist && !r.isClosed { return r, nil } @@ -125,9 +120,7 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig,
opts } c.mu.RLock() w, exist := c.writers[topicConfig.ClientID] - kr, ok := w.(*KWriter) - isClosed := ok && kr.isClosed - if exist && !isClosed { + if exist && !w.isClosed { c.mu.RUnlock() return w, nil } @@ -136,7 +129,7 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig, opts c.mu.Lock() defer c.mu.Unlock() w, exist = c.writers[topicConfig.ClientID] - if exist && !isClosed { + if exist && !w.isClosed { return w, nil } formatter, err := c.getFormatter(formatterArgs{ From fc6708a16adaf02271304f9244e49017cad93641 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sat, 21 Sep 2024 23:13:29 -0700 Subject: [PATCH 14/34] Updated --- client.go | 85 --------------------------------- schemareg.go | 92 ++++++++++++++++++++++++++++++++++++ test/schema_registry_test.go | 6 +-- 3 files changed, 95 insertions(+), 88 deletions(-) create mode 100644 schemareg.go diff --git a/client.go b/client.go index 4ec02a4..fea5b09 100644 --- a/client.go +++ b/client.go @@ -4,14 +4,9 @@ package zkafka import ( "context" - "errors" "fmt" "sync" - "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" - "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" - "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2" - "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/protobuf" "github.com/zillow/zfmt" "go.opentelemetry.io/otel/propagation" "go.opentelemetry.io/otel/trace" ) @@ -210,86 +205,6 @@ func (c *Client) getFormatter(args formatterArgs) (kFormatter, error) { } } -type schemaRegistryFactory struct { - mmu sync.Mutex - srCls map[string]schemaregistry.Client -} - -func newSchemaRegistryFactory() *schemaRegistryFactory { - return &schemaRegistryFactory{ - srCls: make(map[string]schemaregistry.Client), - } -} - -func (c *schemaRegistryFactory) createAvro(srConfig SchemaRegistryConfig) (avroFmt, error) { - cl, err := c.getSchemaClient(srConfig) - if err != nil { - return avroFmt{}, err - } - - deserConfig := avrov2.NewDeserializerConfig() - deser, err := avrov2.NewDeserializer(cl, serde.ValueSerde, deserConfig) - if err != nil { - return avroFmt{}, fmt.Errorf("failed to create deserializer: %w", err) - } - - serConfig := avrov2.NewSerializerConfig() - serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas - serConfig.NormalizeSchemas = true - - ser, err := avrov2.NewSerializer(cl, serde.ValueSerde, serConfig) - if err != nil { - return avroFmt{}, fmt.Errorf("failed to create serializer: %w", err) - } - return avroFmt{ - ser: ser, - deser: deser, - }, nil -} - -func (c *schemaRegistryFactory) createProto(srConfig SchemaRegistryConfig) (protoFmt, error) { - cl, err := c.getSchemaClient(srConfig) - if err != nil { - return protoFmt{}, err - } - - deserConfig := protobuf.NewDeserializerConfig() - deser, err := protobuf.NewDeserializer(cl, serde.ValueSerde, deserConfig) - if err != nil { - return protoFmt{}, fmt.Errorf("failed to create deserializer: %w", err) - } - - serConfig := protobuf.NewSerializerConfig() - serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas - serConfig.NormalizeSchemas = true - - ser, err := protobuf.NewSerializer(cl, serde.ValueSerde, serConfig) - if err != nil { - return protoFmt{}, fmt.Errorf("failed to create serializer: %w", err) - } - return protoFmt{ - ser: ser, - deser: deser, - }, nil - -} - -func (c *schemaRegistryFactory) getSchemaClient(srConfig SchemaRegistryConfig) (schemaregistry.Client, error) { - url := srConfig.URL - if url == "" { - return nil,
errors.New("no schema registry url provided") - } - if srCl, ok := c.srCls[url]; ok { - return srCl, nil - } - client, err := schemaregistry.NewClient(schemaregistry.NewConfig(url)) - if err != nil { - return nil, fmt.Errorf("failed to create schema registry client: %w", err) - } - c.srCls[url] = client - return client, nil -} - func getTracer(tp trace.TracerProvider) trace.Tracer { if tp == nil { return nil diff --git a/schemareg.go b/schemareg.go new file mode 100644 index 0000000..647134b --- /dev/null +++ b/schemareg.go @@ -0,0 +1,92 @@ +package zkafka + +import ( + "errors" + "fmt" + "sync" + + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/protobuf" +) + +type schemaRegistryFactory struct { + mmu sync.Mutex + srCls map[string]schemaregistry.Client +} + +func newSchemaRegistryFactory() *schemaRegistryFactory { + return &schemaRegistryFactory{ + srCls: make(map[string]schemaregistry.Client), + } +} + +func (c *schemaRegistryFactory) createAvro(srConfig SchemaRegistryConfig) (avroFmt, error) { + cl, err := c.getSchemaClient(srConfig) + if err != nil { + return avroFmt{}, err + } + + deserConfig := avrov2.NewDeserializerConfig() + deser, err := avrov2.NewDeserializer(cl, serde.ValueSerde, deserConfig) + if err != nil { + return avroFmt{}, fmt.Errorf("failed to create deserializer: %w", err) + } + + serConfig := avrov2.NewSerializerConfig() + serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas + serConfig.NormalizeSchemas = true + + ser, err := avrov2.NewSerializer(cl, serde.ValueSerde, serConfig) + if err != nil { + return avroFmt{}, fmt.Errorf("failed to create serializer: %w", err) + } + return avroFmt{ + ser: ser, + deser: deser, + }, nil +} + +func (c *schemaRegistryFactory) createProto(srConfig SchemaRegistryConfig) (protoFmt, error) { + cl, err := c.getSchemaClient(srConfig) + if err != nil { + return protoFmt{}, err + } + + deserConfig := protobuf.NewDeserializerConfig() + deser, err := protobuf.NewDeserializer(cl, serde.ValueSerde, deserConfig) + if err != nil { + return protoFmt{}, fmt.Errorf("failed to create deserializer: %w", err) + } + + serConfig := protobuf.NewSerializerConfig() + serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas + serConfig.NormalizeSchemas = true + + ser, err := protobuf.NewSerializer(cl, serde.ValueSerde, serConfig) + if err != nil { + return protoFmt{}, fmt.Errorf("failed to create serializer: %w", err) + } + return protoFmt{ + ser: ser, + deser: deser, + }, nil + +} + +func (c *schemaRegistryFactory) getSchemaClient(srConfig SchemaRegistryConfig) (schemaregistry.Client, error) { + url := srConfig.URL + if url == "" { + return nil, errors.New("no schema registry url provided") + } + if srCl, ok := c.srCls[url]; ok { + return srCl, nil + } + client, err := schemaregistry.NewClient(schemaregistry.NewConfig(url)) + if err != nil { + return nil, fmt.Errorf("failed to create schema registry client: %w", err) + } + c.srCls[url] = client + return client, nil +} diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go index 5b29f14..01770b5 100644 --- a/test/schema_registry_test.go +++ b/test/schema_registry_test.go @@ -225,9 +225,9 @@ func Test_JsonSchemaRegistry(t *testing.T) { require.Fail(t, "implement") } -func Test_AlternateSubjectNamingStrategy(t *testing.T) { 
- require.Fail(t, "implement") -} +//func Test_AlternateSubjectNamingStrategy(t *testing.T) { +// require.Fail(t, "implement") +//} func checkShouldSkipTest(t *testing.T, flags ...string) { t.Helper() From 7cb678a4ee326ff6da65bea5aea422ded7167d44 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sat, 21 Sep 2024 23:26:36 -0700 Subject: [PATCH 15/34] Added json schema registry support --- client.go | 7 ++ formatter.go | 31 ++++---- go.mod | 6 ++ go.sum | 10 +++ schemareg.go | 51 +++++++++++++ test/evolution/json1/schema_1.pb.go | 9 +++ test/evolution/json2/schema_2.go | 10 +++ test/schema_registry_evo_test.go | 111 ++++++++++++++++++++++++++++ test/schema_registry_test.go | 7 -- 9 files changed, 222 insertions(+), 20 deletions(-) create mode 100644 test/evolution/json1/schema_1.pb.go create mode 100644 test/evolution/json2/schema_2.go diff --git a/client.go b/client.go index fea5b09..d2908ae 100644 --- a/client.go +++ b/client.go @@ -194,6 +194,13 @@ func (c *Client) getFormatter(args formatterArgs) (kFormatter, error) { } cf := newProtoSchemaRegistryFormatter(scl) return cf, nil + case JSONSchemaRegistry: + scl, err := c.srf.createJson(args.srCfg) + if err != nil { + return nil, err + } + cf := newJsonSchemaRegistryFormatter(scl) + return cf, nil case CustomFmt: return &errFormatter{}, nil default: diff --git a/formatter.go b/formatter.go index f528a87..c056c36 100644 --- a/formatter.go +++ b/formatter.go @@ -4,9 +4,6 @@ import ( "errors" "fmt" - "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" - "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2" - "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/protobuf" "github.com/zillow/zfmt" //"k8s.io/apimachinery/pkg/runtime/serializer/protobuf" ) @@ -18,6 +15,7 @@ const ( // for the remaining part of the payload. 
It is the successor to `avro_schema` which ships with zfmt, AvroSchemaRegistry zfmt.FormatterType = "avro_schema_registry" ProtoSchemaRegistry zfmt.FormatterType = "proto_schema_registry" + JSONSchemaRegistry zfmt.FormatterType = "json_schema_registry" ) var errMissingFmtter = errors.New("custom formatter is missing, did you forget to call WithFormatter()") @@ -142,20 +140,27 @@ func (f protoSchemaRegistryFormatter) unmarshal(req unmarshReq) error { return nil } -type avroFmt struct { - ser *avrov2.Serializer - deser *avrov2.Deserializer +type jsonSchemaRegistryFormatter struct { + jfmt jsonFmt } -func (s avroFmt) GetID(topic string, avroSchema string) (int, error) { - return s.ser.GetID(topic, nil, &schemaregistry.SchemaInfo{Schema: avroSchema}) +func newJsonSchemaRegistryFormatter(jfmt jsonFmt) jsonSchemaRegistryFormatter { + return jsonSchemaRegistryFormatter{ + jfmt: jfmt, + } } -func (s avroFmt) Deserialize(topic string, value []byte, target any) error { - return s.deser.DeserializeInto(topic, value, target) +func (f jsonSchemaRegistryFormatter) marshall(req marshReq) ([]byte, error) { + msgBytes, err := f.jfmt.ser.Serialize(req.topic, req.subject) + if err != nil { + return nil, fmt.Errorf("failed to json schema serialize: %w", err) + } + return msgBytes, nil } -type protoFmt struct { - ser *protobuf.Serializer - deser *protobuf.Deserializer +func (f jsonSchemaRegistryFormatter) unmarshal(req unmarshReq) error { + if err := f.jfmt.deser.DeserializeInto(req.topic, req.data, req.target); err != nil { + return fmt.Errorf("failed to json schema deserialize: %w", err) + } + return nil } diff --git a/go.mod b/go.mod index 16ad96f..8d4abb0 100644 --- a/go.mod +++ b/go.mod @@ -19,18 +19,24 @@ require ( require ( github.com/actgardner/gogen-avro/v10 v10.2.1 // indirect + github.com/bahlo/generic-list-go v0.2.0 // indirect github.com/bufbuild/protocompile v0.8.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/hamba/avro/v2 v2.20.1 // indirect + github.com/invopop/jsonschema v0.12.0 // indirect github.com/jhump/protoreflect v1.15.6 // indirect github.com/json-iterator/go v1.1.12 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/santhosh-tekuri/jsonschema/v5 v5.3.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect go.opentelemetry.io/otel/metric v1.28.0 // indirect google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index d13b414..ba5f7cf 100644 --- a/go.sum +++ b/go.sum @@ -63,12 +63,16 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw= github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= github.com/beorn7/perks 
v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bufbuild/protocompile v0.8.0 h1:9Kp1q6OkS9L4nM3FYbr8vlJnEwtbpDPQlQOVXfR+78s= github.com/bufbuild/protocompile v0.8.0/go.mod h1:+Etjg4guZoAqzVk2czwEQP12yaxLJ8DxuqCJ9qHdH94= github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY= github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/cenkalti/backoff/v3 v3.0.0 h1:ske+9nBpD9qZsTBoF41nW5L+AIuFBKMeze18XQ3eG1c= github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= @@ -222,6 +226,8 @@ github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/invopop/jsonschema v0.12.0 h1:6ovsNSuvn9wEQVOyc72aycBMVQFKz7cPdMJn10CvzRI= +github.com/invopop/jsonschema v0.12.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= github.com/jhump/protoreflect v1.15.6 h1:WMYJbw2Wo+KOWwZFvgY0jMoVHM6i4XIvRs2RcBj5VmI= github.com/jhump/protoreflect v1.15.6/go.mod h1:jCHoyYQIJnaabEYnbGwyo9hUqfyUMTbJw/tAut5t97E= github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= @@ -331,6 +337,8 @@ github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjR github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.0 h1:uIkTLo0AGRc8l7h5l9r+GcYi9qfVPt6lD4/bhmzfiKo= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0= github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE= github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs= github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU= @@ -383,6 +391,8 @@ github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/ github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk= github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531 h1:Y/M5lygoNPKwVNLMPXgVfsRT40CSFKXCxuU8LoHySjs= github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc= +github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc= +github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= 
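Note: the schemareg.go hunk below adds a createJson factory alongside the existing createAvro/createProto ones. As a rough, illustrative sketch of what this patch enables, a producer opts into the new formatter via Formatter: zkafka.JSONSchemaRegistry. The broker address, topic, and client ID below are placeholders (not from this patch), and `mock://` is the in-memory registry URL the tests in this series use:

```go
package main

import (
	"context"
	"log"

	"github.com/zillow/zkafka"
)

// DummyEvent is a stand-in payload; any json-serializable struct works.
type DummyEvent struct {
	IntField    int64  `json:"IntField,omitempty"`
	StringField string `json:"StringField,omitempty"`
}

func main() {
	ctx := context.Background()
	// Broker address is a placeholder.
	client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{"localhost:9092"}})
	defer func() { _ = client.Close() }()

	// Like the avro/proto variants, the json formatter prefixes the serialized
	// payload with the schema ID resolved from the configured registry.
	writer, err := client.Writer(ctx, zkafka.ProducerTopicConfig{
		ClientID:  "example-writer", // placeholder
		Topic:     "example-topic",  // placeholder
		Formatter: zkafka.JSONSchemaRegistry,
		SchemaRegistry: zkafka.SchemaRegistryConfig{
			URL: "mock://",
			Serialization: zkafka.SerializationConfig{
				AutoRegisterSchemas: true,
			},
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	if _, err := writer.Write(ctx, &DummyEvent{IntField: 1, StringField: "hello"}); err != nil {
		log.Fatal(err)
	}
}
```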
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= diff --git a/schemareg.go b/schemareg.go index 647134b..f6bc3bc 100644 --- a/schemareg.go +++ b/schemareg.go @@ -8,6 +8,7 @@ import ( "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avrov2" + "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/jsonschema" "github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/protobuf" ) @@ -75,6 +76,33 @@ func (c *schemaRegistryFactory) createProto(srConfig SchemaRegistryConfig) (prot } +func (c *schemaRegistryFactory) createJson(srConfig SchemaRegistryConfig) (jsonFmt, error) { + cl, err := c.getSchemaClient(srConfig) + if err != nil { + return jsonFmt{}, err + } + + deserConfig := jsonschema.NewDeserializerConfig() + deser, err := jsonschema.NewDeserializer(cl, serde.ValueSerde, deserConfig) + if err != nil { + return jsonFmt{}, fmt.Errorf("failed to create deserializer: %w", err) + } + + serConfig := jsonschema.NewSerializerConfig() + serConfig.AutoRegisterSchemas = srConfig.Serialization.AutoRegisterSchemas + serConfig.NormalizeSchemas = true + + ser, err := jsonschema.NewSerializer(cl, serde.ValueSerde, serConfig) + if err != nil { + return jsonFmt{}, fmt.Errorf("failed to create serializer: %w", err) + } + return jsonFmt{ + ser: ser, + deser: deser, + }, nil + +} + func (c *schemaRegistryFactory) getSchemaClient(srConfig SchemaRegistryConfig) (schemaregistry.Client, error) { url := srConfig.URL if url == "" { @@ -90,3 +118,26 @@ func (c *schemaRegistryFactory) getSchemaClient(srConfig SchemaRegistryConfig) ( c.srCls[url] = client return client, nil } + +type avroFmt struct { + ser *avrov2.Serializer + deser *avrov2.Deserializer +} + +func (s avroFmt) GetID(topic string, avroSchema string) (int, error) { + return s.ser.GetID(topic, nil, &schemaregistry.SchemaInfo{Schema: avroSchema}) +} + +func (s avroFmt) Deserialize(topic string, value []byte, target any) error { + return s.deser.DeserializeInto(topic, value, target) +} + +type protoFmt struct { + ser *protobuf.Serializer + deser *protobuf.Deserializer +} + +type jsonFmt struct { + ser *jsonschema.Serializer + deser *jsonschema.Deserializer +} diff --git a/test/evolution/json1/schema_1.pb.go b/test/evolution/json1/schema_1.pb.go new file mode 100644 index 0000000..e7e52be --- /dev/null +++ b/test/evolution/json1/schema_1.pb.go @@ -0,0 +1,9 @@ +package json1 + +type DummyEvent struct { + IntField int64 `json:"IntField,omitempty"` + DoubleField float32 `json:"DoubleField,omitempty"` + StringField string `json:"StringField,omitempty"` + BoolField bool `json:"BoolField,omitempty"` + BytesField []byte `json:"BytesField,omitempty"` +} diff --git a/test/evolution/json2/schema_2.go b/test/evolution/json2/schema_2.go new file mode 100644 index 0000000..5d26adb --- /dev/null +++ b/test/evolution/json2/schema_2.go @@ -0,0 +1,10 @@ +package json2 + +type DummyEvent struct { + IntField int64 `json:"IntField,omitempty"` + DoubleField float32 `json:"DoubleField,omitempty"` + StringField string `json:"StringField,omitempty"` + BoolField bool `json:"BoolField,omitempty"` + BytesField []byte `json:"BytesField,omitempty"` + NewField string `json:"NewField,omitempty"` +} diff --git a/test/schema_registry_evo_test.go b/test/schema_registry_evo_test.go index b99e72a..f42e02a 100644 --- a/test/schema_registry_evo_test.go +++ 
b/test/schema_registry_evo_test.go @@ -16,6 +16,8 @@ import ( "github.com/zillow/zkafka" "github.com/zillow/zkafka/test/evolution/avro1" "github.com/zillow/zkafka/test/evolution/avro2" + "github.com/zillow/zkafka/test/evolution/json1" + "github.com/zillow/zkafka/test/evolution/json2" "github.com/zillow/zkafka/test/evolution/proto1" "github.com/zillow/zkafka/test/evolution/proto2" ) @@ -256,3 +258,112 @@ func Test_SchemaRegistryReal_Proto_AutoRegisterSchemas_BackwardCompatibleSchemas require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(proto2.DummyEvent{})) } + +func Test_SchemaRegistryReal_JSON_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest, enableSchemaRegistryTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + evt1 := json1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + evt2 := json2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: uuid.NewString(), + } + _, err = writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "http://localhost:8081", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := json1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) + + receivedEvt2Schema1 := json1.DummyEvent{} + require.NoError(t, 
msg2.Decode(&receivedEvt2Schema1))
+	expectedEvt2 := json1.DummyEvent{
+		IntField:    evt2.IntField,
+		DoubleField: evt2.DoubleField,
+		StringField: evt2.StringField,
+		BoolField:   evt2.BoolField,
+		BytesField:  evt2.BytesField,
+	}
+	assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(json1.DummyEvent{}))
+
+	receivedEvt2Schema2 := json2.DummyEvent{}
+	require.NoError(t, msg2.Decode(&receivedEvt2Schema2))
+	assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(json2.DummyEvent{}))
+}
diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go
index 01770b5..381e96d 100644
--- a/test/schema_registry_test.go
+++ b/test/schema_registry_test.go
@@ -221,13 +221,6 @@ func Test_SchemaRegistry_Avro_AutoRegisterSchemas_RequiresSchemaSpecification(t
 func Test_SchemaNotRegistered_ImpactToWorker(t *testing.T) {
 	require.Fail(t, "implement")
 }
-func Test_JsonSchemaRegistry(t *testing.T) {
-	require.Fail(t, "implement")
-}
-
-//func Test_AlternateSubjectNamingStrategy(t *testing.T) {
-//	require.Fail(t, "implement")
-//}
 
 func checkShouldSkipTest(t *testing.T, flags ...string) {
 	t.Helper()

From 78d3ac732b6a64eafa16045108e2931713498d40 Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Sat, 21 Sep 2024 23:32:21 -0700
Subject: [PATCH 16/34] Updated

---
 formatter.go | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/formatter.go b/formatter.go
index c056c36..2489652 100644
--- a/formatter.go
+++ b/formatter.go
@@ -5,7 +5,6 @@ import (
 	"fmt"
 
 	"github.com/zillow/zfmt"
-	//"k8s.io/apimachinery/pkg/runtime/serializer/protobuf"
 )
 
 const (
@@ -13,9 +12,16 @@ const (
 	CustomFmt zfmt.FormatterType = "custom"
 	// AvroSchemaRegistry uses confluent's schema registry. It encodes a schemaID as the first 5 bytes and then avro serializes (binary)
 	// for the remaining part of the payload. It is the successor to `avro_schema` which ships with zfmt,
-	AvroSchemaRegistry zfmt.FormatterType = "avro_schema_registry"
+	AvroSchemaRegistry zfmt.FormatterType = "avro_schema_registry"
+
+	// ProtoSchemaRegistry uses confluent's schema registry. It encodes a schemaID as well as the message types as
+	// a payload prefix and then proto serializes (binary) for the remaining part of the payload.
+	// zfmt.ProtoSchemaDeprecatedFmt had a bug in its implementation and didn't work properly with confluent.
 	ProtoSchemaRegistry zfmt.FormatterType = "proto_schema_registry"
-	JSONSchemaRegistry zfmt.FormatterType = "json_schema_registry"
+
+	// JSONSchemaRegistry uses confluent's schema registry. It encodes a schemaID as the first 5 bytes and then json serializes (human readable)
+	// for the remaining part of the payload.
It is the successor to `json_schema` which ships with zfmt.
+	JSONSchemaRegistry zfmt.FormatterType = "json_schema_registry"
 )
 
 var errMissingFmtter = errors.New("custom formatter is missing, did you forget to call WithFormatter()")

From d0f2875d572b7234dd890aa4710497b59873efa3 Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Sat, 21 Sep 2024 23:52:58 -0700
Subject: [PATCH 17/34] Added schema registry test

---
 test/schema_registry_test.go | 67 ++++++++++++++++++++++++++++++++++--
 1 file changed, 65 insertions(+), 2 deletions(-)

diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go
index 381e96d..9f498b7 100644
--- a/test/schema_registry_test.go
+++ b/test/schema_registry_test.go
@@ -10,6 +10,7 @@ import (
 
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/require"
+	"github.com/zillow/zfmt"
 	"github.com/zillow/zkafka"
 	"github.com/zillow/zkafka/test/evolution/avro1"
 	"github.com/zillow/zkafka/test/evolution/avro2"
@@ -218,8 +219,70 @@ func Test_SchemaRegistry_Avro_AutoRegisterSchemas_RequiresSchemaSpecification(t
 	require.ErrorContains(t, err, "avro schema is required for schema registry formatter")
 }
 
-func Test_SchemaNotRegistered_ImpactToWorker(t *testing.T) {
-	require.Fail(t, "implement")
+// Test_SchemaNotRegistered_ResultsInWorkerDecodeError demonstrates the behavior when a worker reads
+// a message for a schema that doesn't exist in the schema registry. This test shows that such a situation results in a decode error.
+func Test_SchemaNotRegistered_ResultsInWorkerDecodeError(t *testing.T) {
+	checkShouldSkipTest(t, enableKafkaBrokerTest)
+
+	ctx := context.Background()
+	ctx, cancel := context.WithCancel(ctx)
+	defer cancel()
+
+	topic := "integration-test-topic-2" + uuid.NewString()
+	bootstrapServer := getBootstrap()
+
+	createTopic(t, bootstrapServer, topic, 1)
+	t.Logf("Created topic: %s", topic)
+
+	groupID := uuid.NewString()
+
+	client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{}))
+	defer func() { require.NoError(t, client.Close()) }()
+
+	t.Log("Created writer - no schema registration")
+	writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{
+		ClientID:  fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()),
+		Topic:     topic,
+		Formatter: zfmt.AvroSchemaFmt,
+		SchemaID:  1,
+	})
+	require.NoError(t, err)
+
+	evt1 := avro1.DummyEvent{
+		IntField:    int(rand.Int31()),
+		DoubleField: rand.Float64(),
+		StringField: uuid.NewString(),
+		BoolField:   true,
+		BytesField:  []byte(uuid.NewString()),
+	}
+	// write msg1
+	_, err = writer1.Write(ctx, evt1)
+	require.NoError(t, err)
+
+	consumerTopicConfig := zkafka.ConsumerTopicConfig{
+		ClientID:  fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()),
+		Topic:     topic,
+		Formatter: zkafka.AvroSchemaRegistry,
+		SchemaRegistry: zkafka.SchemaRegistryConfig{
+			URL: "mock://",
+		},
+		GroupID: groupID,
+		AdditionalProps: map[string]any{
+			"auto.offset.reset": "earliest",
+		},
+	}
+	var gotErr error
+	wf := zkafka.NewWorkFactory(client)
+	w := wf.CreateWithFunc(consumerTopicConfig, func(_ context.Context, msg *zkafka.Message) error {
+		defer cancel()
+		gotErr = msg.Decode(&avro1.DummyEvent{})
+		return gotErr
+	})
+
+	t.Log("Begin reading messages")
+	err = w.Run(ctx, nil)
+	require.NoError(t, err)
+	require.ErrorContains(t, gotErr, "Subject Not Found")
 }
 
 func checkShouldSkipTest(t *testing.T, flags ...string) {
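The "Subject Not Found" failure above is a consequence of the confluent wire format these schema-registry formatters read and write: the payload's first 5 bytes are a magic zero byte followed by a big-endian uint32 schema ID, which the deserializer must resolve against the registry before decoding the remainder. A minimal sketch for peeking at that ID when debugging such decode errors; this helper is illustrative only and not part of the patch set:

	package schemadebug

	import (
		"encoding/binary"
		"errors"
	)

	// schemaIDFromPayload extracts the schema ID from a confluent wire-format
	// payload: byte 0 is the magic byte (0x00), bytes 1-4 hold a big-endian uint32.
	func schemaIDFromPayload(payload []byte) (uint32, error) {
		if len(payload) < 5 {
			return 0, errors.New("payload shorter than the 5-byte wire-format header")
		}
		if payload[0] != 0 {
			return 0, errors.New("missing magic byte; payload is not schema-registry encoded")
		}
		return binary.BigEndian.Uint32(payload[1:5]), nil
	}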
From 4e0da8103f137e4cec749a615c9e21f0757ecb1e Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Sun, 22 Sep 2024 08:23:36 -0700
Subject: [PATCH 18/34] Added `Test_DeadletterClientDoesntCollideWithProducer`

---
 test/integration_test.go | 136 +++++++++++++++++++++++++++++++++++++++
 test/worker_test.go      |  12 ++--
 work.go                  |   2 +-
 3 files changed, 143 insertions(+), 7 deletions(-)
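The collision this patch pins down comes from the writer cache being keyed on ClientID alone (a later patch in this series re-keys the cache on ClientID plus topic). A minimal sketch of the failure mode, with hypothetical topic names; this function is illustrative only and not part of the patch set:

	// demonstrateCollision creates two writers sharing one ClientID. With the
	// cache keyed on ClientID alone, the second Writer call returned the first
	// (dead letter) writer, so egress messages silently landed on the DLT.
	func demonstrateCollision(ctx context.Context, client *zkafka.Client) error {
		dltWriter, err := client.Writer(ctx, zkafka.ProducerTopicConfig{
			ClientID: "service-x",
			Topic:    "example-dlt",
		})
		if err != nil {
			return err
		}
		egressWriter, err := client.Writer(ctx, zkafka.ProducerTopicConfig{
			ClientID: "service-x",
			Topic:    "example-egress",
		})
		if err != nil {
			return err
		}
		// Pre-fix: dltWriter and egressWriter are the same cached instance.
		_, _ = dltWriter, egressWriter
		return nil
	}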
diff --git a/test/integration_test.go b/test/integration_test.go
index 227cc26..49f60ee 100644
--- a/test/integration_test.go
+++ b/test/integration_test.go
@@ -8,6 +8,7 @@ import (
 	"os"
 	"slices"
 	"sync"
+	"sync/atomic"
 	"testing"
 	"time"
 
@@ -1200,6 +1201,141 @@ func Test_WorkDelay_DoesntHaveDurationStackEffect(t *testing.T) {
 	require.WithinDuration(t, last.processingInstant, first.processingInstant, time.Duration(processDelayMillis/2)*time.Millisecond, "Time since first and last processed message should be very short, since processing just updates an in memory slice. This should take on the order of microseconds, but to account for scheduling drift the assertion is half the delay")
 }
 
+// Test_DeadletterClientDoesntCollideWithProducer tests a common configuration scenario
+// where a worker consumer has a clientID and the dead letter producer is implicitly configured
+// with a clientID. For example, say the client id was `service-x`; previously, the dead letter producer
+// inherited the same clientID and would create a publisher with that name.
+// The issue was if the processor was acting as a connector and published to another topic, it might be
+// common to have explicitly configured a producer with the name `service-x`. This
+// resulted in a collision for the cached producer clients, and the effect was that all messages
+// would be written to the topic that happened to be registered in the client cache first (this would have
+// been the dead letter producer).
+//
+// This test shows that when a connector processes N messages, half of which error (dead letter) and half of
+// which are forwarded to an egress topic, messages end up in both targets (as opposed to exclusively in the dead letter topic)
+func Test_DeadletterClientDoesntCollideWithProducer(t *testing.T) {
+	checkShouldSkipTest(t, enableKafkaBrokerTest)
+
+	ctx := context.Background()
+
+	bootstrapServer := getBootstrap()
+
+	topicIngress := "integration-test-topic-1" + uuid.NewString()
+	createTopic(t, bootstrapServer, topicIngress, 1)
+	topicEgress := "integration-test-topic-2" + uuid.NewString()
+	createTopic(t, bootstrapServer, topicEgress, 1)
+	topicDLT := "integration-test-topic-dlt" + uuid.NewString()
+	createTopic(t, bootstrapServer, topicDLT, 1)
+
+	groupID := uuid.NewString()
+
+	client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{}))
+	defer func() { require.NoError(t, client.Close()) }()
+
+	writer, err := client.Writer(ctx, zkafka.ProducerTopicConfig{
+		ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()),
+		Topic:    topicIngress,
+	})
+	clientID1 := fmt.Sprintf("service-x-%s-%s", t.Name(), uuid.NewString())
+	ingressTopicReaderConfig := zkafka.ConsumerTopicConfig{
+		ClientID: clientID1,
+		Topic:    topicIngress,
+		GroupID:  groupID,
+		// deadlettertopic uses implicit clientid
+		DeadLetterTopicConfig: &zkafka.ProducerTopicConfig{
+			Topic: topicDLT,
+		},
+		AdditionalProps: map[string]any{
+			"auto.offset.reset": "earliest",
+		},
+	}
+	processorWriter, err := client.Writer(ctx, zkafka.ProducerTopicConfig{
+		ClientID: clientID1,
+		Topic:    topicEgress,
+	})
+
+	// The consumer group is new and auto.offset.reset=earliest is configured above, so the worker
+	// started below will read the messages written beforehand from the beginning of the topic.
+	msg := Msg{Val: "1"}
+
+	// write 3 messages to ingress topic
+	_, err = writer.Write(ctx, msg)
+	require.NoError(t, err)
+	_, err = writer.Write(ctx, msg)
+	require.NoError(t, err)
+	_, err = writer.Write(ctx, msg)
+	require.NoError(t, err)
+
+	ctx, cancel := context.WithCancel(ctx)
+	defer cancel()
+	msgCount := atomic.Int64{}
+	wf := zkafka.NewWorkFactory(client, zkafka.WithWorkLifecycleHooks(zkafka.LifecycleHooks{
+		PostProcessing: func(ctx context.Context, meta zkafka.LifecyclePostProcessingMeta) error {
+			if msgCount.Load() == 3 {
+				cancel()
+			}
+
+			return nil
+		},
+	}))
+	w := wf.CreateWithFunc(ingressTopicReaderConfig, func(ctx context.Context, msg *zkafka.Message) error {
+		t.Log("Processing message from ingress topic")
+		msgCount.Add(1)
+		if msgCount.Load()%2 == 0 {
+			return errors.New("random error occurred")
+		}
+		_, err := processorWriter.WriteRaw(ctx, nil, msg.Value())
+
+		return err
+	})
+
+	t.Log("Begin primary work loop")
+	err = w.Run(ctx, nil)
+	require.NoError(t, err)
+	t.Log("Exit primary work loop. Assess proper side effects")
+
+	egressTopicReaderConfig := zkafka.ConsumerTopicConfig{
+		ClientID: uuid.NewString(),
+		Topic:    topicEgress,
+		GroupID:  uuid.NewString(),
+		AdditionalProps: map[string]any{
+			"auto.offset.reset": "earliest",
+		},
+	}
+
+	ctx1, cancel1 := context.WithTimeout(context.Background(), time.Minute)
+	defer cancel1()
+	egressCount := atomic.Int64{}
+	w1 := wf.CreateWithFunc(egressTopicReaderConfig, func(_ context.Context, msg *zkafka.Message) error {
+		egressCount.Add(1)
+		cancel1()
+		return nil
+	})
+
+	dltTopicReaderConfig := zkafka.ConsumerTopicConfig{
+		ClientID: uuid.NewString(),
+		Topic:    topicDLT,
+		GroupID:  uuid.NewString(),
+		AdditionalProps: map[string]any{
+			"auto.offset.reset": "earliest",
+		},
+	}
+	ctx2, cancel2 := context.WithTimeout(context.Background(), time.Minute)
+	defer cancel2()
+	dltCount := atomic.Int64{}
+	w2 := wf.CreateWithFunc(dltTopicReaderConfig, func(_ context.Context, msg *zkafka.Message) error {
+		dltCount.Add(1)
+		cancel2()
+		return nil
+	})
+
+	t.Log("Start work running. Looking for egress event and DLT topic event")
+	require.NoError(t, w1.Run(ctx1, nil))
+	require.NoError(t, w2.Run(ctx2, nil))
+	require.Equal(t, int64(1), egressCount.Load(), "Expected a message to be written to the egress topic")
+	require.Equal(t, int64(1), dltCount.Load(), "Expected a message to be written to the DLT topic")
+}
+
 func createTopic(t *testing.T, bootstrapServer, topic string, partitions int) {
 	t.Helper()
 	aclient, err := kafka.NewAdminClient(&kafka.ConfigMap{"bootstrap.servers": bootstrapServer})
diff --git a/test/worker_test.go b/test/worker_test.go
index f22584f..0d64521 100644
--- a/test/worker_test.go
+++ b/test/worker_test.go
@@ -2060,12 +2060,12 @@ func TestWork_ShutdownCausesRunExit(t *testing.T) {
 	require.NoError(t, err)
 }
 
-func Test_DeadeletterClientDoesntCollidWithProducer(t *testing.T) {
-	// work with deadletter clientid=123
-	// producer clientid=123
-	// This is a classic pattern. connecetor style. Shouldn't have issues with writes
-	t.Fail()
-}
+// func Test_DeadeletterClientDoesntCollidWithProducer(t *testing.T) {
+//	// work with deadletter clientid=123
+//	// producer clientid=123
+//	// This is a classic pattern. connecetor style.
Shouldn't have issues with writes +// t.Fail() +// } func Test_MissingBootstrap_ShouldGiveClearError(t *testing.T) { t.Fail() diff --git a/work.go b/work.go index 3ac17e9..a0e1c93 100644 --- a/work.go +++ b/work.go @@ -664,7 +664,7 @@ func (f WorkFactory) Create(topicConfig ConsumerTopicConfig, processor processor if topicConfig.DeadLetterTopicConfig != nil { cfg := *topicConfig.DeadLetterTopicConfig if cfg.ClientID == "" { - cfg.ClientID = topicConfig.ClientID + cfg.ClientID = topicConfig.ClientID + "-auto-deadletter-publisher" } options = append(options, WithDeadLetterTopic(cfg)) } From 14f65897f58c56c6b8319f719f4b657230dc2d6f Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 13:25:08 -0700 Subject: [PATCH 19/34] Updated tests to provide bootstrap in config --- client_test.go | 59 +++++++++++++++++---------------- config.go | 7 ++-- lifecycle.go | 70 +++++++++++++++++++++++++++++++++++++--- reader.go | 5 ++- reader_test.go | 35 ++++++++------------ test/integration_test.go | 43 ++++++++++++++++++++++++ test/worker_test.go | 11 ------- work.go | 8 +++++ 8 files changed, 168 insertions(+), 70 deletions(-) diff --git a/client_test.go b/client_test.go index 3ea4ed4..ce6c0af 100644 --- a/client_test.go +++ b/client_test.go @@ -33,8 +33,8 @@ func TestNewClient(t *testing.T) { { name: "empty config", want: &Client{ - readers: make(map[string]Reader), - writers: make(map[string]Writer), + readers: make(map[string]*KReader), + writers: make(map[string]*KWriter), logger: NoopLogger{}, producerProvider: defaultConfluentProducerProvider{}.NewProducer, consumerProvider: defaultConfluentConsumerProvider{}.NewConsumer, @@ -51,8 +51,8 @@ func TestNewClient(t *testing.T) { conf: Config{ BootstrapServers: []string{"test"}, }, - readers: make(map[string]Reader), - writers: make(map[string]Writer), + readers: make(map[string]*KReader), + writers: make(map[string]*KWriter), logger: NoopLogger{}, producerProvider: defaultConfluentProducerProvider{}.NewProducer, consumerProvider: defaultConfluentConsumerProvider{}.NewConsumer, @@ -124,8 +124,8 @@ func TestClient_WithOptions(t *testing.T) { func TestClient_Reader(t *testing.T) { type fields struct { conf Config - readers map[string]Reader - writers map[string]Writer + readers map[string]*KReader + writers map[string]*KWriter logger Logger producerProvider confluentProducerProvider consumerProvider confluentConsumerProvider @@ -146,7 +146,7 @@ func TestClient_Reader(t *testing.T) { name: "create new KReader with overridden Brokers, error from consumer provider", fields: fields{ consumerProvider: mockConfluentConsumerProvider{err: true}.NewConsumer, - readers: make(map[string]Reader), + readers: make(map[string]*KReader), }, args: args{ topicConfig: ConsumerTopicConfig{ @@ -162,7 +162,7 @@ func TestClient_Reader(t *testing.T) { name: "create new KReader with bad formatter", fields: fields{ consumerProvider: mockConfluentConsumerProvider{err: false}.NewConsumer, - readers: make(map[string]Reader), + readers: make(map[string]*KReader), }, args: args{ topicConfig: ConsumerTopicConfig{ @@ -178,7 +178,8 @@ func TestClient_Reader(t *testing.T) { { name: "create new KReader for closed KReader", fields: fields{ - readers: map[string]Reader{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, + readers: map[string]*KReader{ "test-config": &KReader{isClosed: true}, }, consumerProvider: mockConfluentConsumerProvider{c: MockKafkaConsumer{ID: "stew"}}.NewConsumer, @@ -210,7 +211,8 @@ func TestClient_Reader(t *testing.T) { { name: "create new 
KReader for closed KReader with default overrides", fields: fields{ - readers: map[string]Reader{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, + readers: map[string]*KReader{ "test-config": &KReader{isClosed: true}, }, consumerProvider: mockConfluentConsumerProvider{c: MockKafkaConsumer{ID: "stew"}}.NewConsumer, @@ -250,7 +252,7 @@ func TestClient_Reader(t *testing.T) { { name: "get from cache", fields: fields{ - readers: map[string]Reader{ + readers: map[string]*KReader{ "test-config": &KReader{}, }, }, @@ -303,8 +305,8 @@ func TestClient_Reader(t *testing.T) { func TestClient_Writer(t *testing.T) { type fields struct { conf Config - readers map[string]Reader - writers map[string]Writer + readers map[string]*KReader + writers map[string]*KWriter logger Logger producerProvider confluentProducerProvider } @@ -324,7 +326,7 @@ func TestClient_Writer(t *testing.T) { name: "create new KWriter with overridden Brokers, error from producer provider", fields: fields{ producerProvider: mockConfluentProducerProvider{err: true}.NewProducer, - writers: make(map[string]Writer), + writers: make(map[string]*KWriter), conf: Config{ SaslUsername: ptr("test-user"), SaslPassword: ptr("test-password"), @@ -342,7 +344,7 @@ func TestClient_Writer(t *testing.T) { { name: "create new KWriter for closed writer", fields: fields{ - writers: map[string]Writer{ + writers: map[string]*KWriter{ "test-id": &KWriter{isClosed: true}, }, producerProvider: mockConfluentProducerProvider{}.NewProducer, @@ -371,7 +373,7 @@ func TestClient_Writer(t *testing.T) { { name: "create new KWriter for closed writer with default overrides", fields: fields{ - writers: map[string]Writer{ + writers: map[string]*KWriter{ "test-id": &KWriter{isClosed: true}, }, producerProvider: mockConfluentProducerProvider{}.NewProducer, @@ -407,7 +409,7 @@ func TestClient_Writer(t *testing.T) { { name: "get from cache", fields: fields{ - writers: map[string]Writer{ + writers: map[string]*KWriter{ "test-id": &KWriter{}, }, }, @@ -453,15 +455,15 @@ func TestClient_Close(t *testing.T) { type fields struct { Mutex *sync.Mutex conf Config - readers map[string]Reader - writers map[string]Writer + readers map[string]*KReader + writers map[string]*KWriter } m := mockConfluentConsumerProvider{ c: mockConsumer, }.NewConsumer r1, err := newReader(readerArgs{ - cfg: Config{}, + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: ConsumerTopicConfig{ Formatter: zfmt.StringFmt, }, @@ -471,7 +473,7 @@ func TestClient_Close(t *testing.T) { }) require.NoError(t, err) r2, err := newReader(readerArgs{ - cfg: Config{}, + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: ConsumerTopicConfig{ Formatter: zfmt.StringFmt, }, @@ -493,11 +495,11 @@ func TestClient_Close(t *testing.T) { name: "with readers/writers => no error", wantErr: true, fields: fields{ - readers: map[string]Reader{ + readers: map[string]*KReader{ "r1": r1, "r2": r2, }, - writers: map[string]Writer{ + writers: map[string]*KWriter{ "w1": &KWriter{producer: p}, "w2": &KWriter{producer: p}, }, @@ -519,14 +521,10 @@ func TestClient_Close(t *testing.T) { require.NoError(t, err) } for _, w := range c.writers { - kw, ok := w.(*KWriter) - require.True(t, ok, "Expected writer to be KWriter") - require.True(t, kw.isClosed, "clients writer should be closed") + require.True(t, w.isClosed, "clients writer should be closed") } for _, r := range c.readers { - kr, ok := r.(*KReader) - require.True(t, ok, "Expected reader to be KReader") - require.True(t, kr.isClosed, "clients reader 
should be closed") + require.True(t, r.isClosed, "clients reader should be closed") } }) } @@ -997,7 +995,8 @@ func Test_makeConfig_Consumer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { defer recoverThenFail(t) - got := makeConsumerConfig(tt.args.conf, tt.args.topicConfig, tt.args.prefix) + got, err := makeConsumerConfig(tt.args.conf, tt.args.topicConfig, tt.args.prefix) + require.NoError(t, err) assertEqual(t, got, tt.want) }) } diff --git a/config.go b/config.go index fd093f6..7c21684 100644 --- a/config.go +++ b/config.go @@ -284,7 +284,7 @@ func getDefaultProducerTopicConfig(topicConfig *ProducerTopicConfig) error { // makeConsumerConfig creates a kafka configMap from the specified strongly typed Config and TopicConfig. // TopicConfig specifies a way to specify config values that aren't strongly typed via AdditionalProps field. // Those values are overwritten if specified in strongly typed TopicConfig fields. -func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix string) kafka.ConfigMap { +func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix string) (kafka.ConfigMap, error) { configMap := kafka.ConfigMap{} configMap[clientID] = topicConfig.ClientID @@ -322,6 +322,9 @@ func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix str if len(topicConfig.BootstrapServers) != 0 { addresses = topicConfig.BootstrapServers } + if len(addresses) == 0 { + return nil, errors.New("invalid config, missing bootstrap server addresses") + } configMap[bootstrapServers] = strings.Join(addresses, ",") saslUname := conf.SaslUsername @@ -353,7 +356,7 @@ func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix str configMap[key] = kafka.ConfigValue(v) } } - return configMap + return configMap, nil } // makeProducerConfig creates a kafka configMap from the specified strongly typed Config and TopicConfig. diff --git a/lifecycle.go b/lifecycle.go index 3e5d414..50bd273 100644 --- a/lifecycle.go +++ b/lifecycle.go @@ -9,9 +9,16 @@ import ( type LifecyclePostReadMeta struct { Topic string GroupID string - // Message that was read + // Message that was read (will be non nil) Message *Message } + +type LifecyclePostReadImmediateMeta struct { + // Message that was read (could be nil) + Message *Message + Err error +} + type LifecyclePreProcessingMeta struct { Topic string GroupID string @@ -47,9 +54,11 @@ type LifecyclePreWriteResp struct { } type LifecycleHooks struct { - // Called by work after reading a message, offers the ability to customize the context object (resulting context object passed to work processor) + // Called by work after reading a message (guaranteed non nil), offers the ability to customize the context object (resulting context object passed to work processor) PostRead func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) + PostReadImmediate func(ctx context.Context, meta LifecyclePostReadImmediateMeta) + // Called after receiving a message and before processing it. 
PreProcessing func(ctx context.Context, meta LifecyclePreProcessingMeta) (context.Context, error)
 
@@ -78,6 +87,32 @@ func ChainLifecycleHooks(hooks ...LifecycleHooks) LifecycleHooks {
 		return hooks[0]
 	}
 	return LifecycleHooks{
+		PostRead: func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) {
+			var allErrs error
+
+			hookCtx := ctx
+
+			for _, h := range hooks {
+				if h.PostRead != nil {
+					var err error
+
+					hookCtx, err = h.PostRead(hookCtx, meta)
+					if err != nil {
+						allErrs = errors.Join(allErrs, err)
+					}
+				}
+			}
+
+			return hookCtx, allErrs
+
+		},
+		PostReadImmediate: func(ctx context.Context, meta LifecyclePostReadImmediateMeta) {
+			for _, h := range hooks {
+				if h.PostReadImmediate != nil {
+					h.PostReadImmediate(ctx, meta)
+				}
+			}
+		},
 		PreProcessing: func(ctx context.Context, meta LifecyclePreProcessingMeta) (context.Context, error) {
 			var allErrs error
@@ -96,7 +131,6 @@ func ChainLifecycleHooks(hooks ...LifecycleHooks) LifecycleHooks {
 
 			return hookCtx, allErrs
 		},
-
 		PostProcessing: func(ctx context.Context, meta LifecyclePostProcessingMeta) error {
 			var allErrs error
@@ -111,7 +145,6 @@ func ChainLifecycleHooks(hooks ...LifecycleHooks) LifecycleHooks {
 
 			return allErrs
 		},
-
 		PostAck: func(ctx context.Context, meta LifecyclePostAckMeta) error {
 			var allErrs error
@@ -126,5 +159,34 @@ func ChainLifecycleHooks(hooks ...LifecycleHooks) LifecycleHooks {
 
 			return allErrs
 		},
+		PreWrite: func(ctx context.Context, meta LifecyclePreWriteMeta) (LifecyclePreWriteResp, error) {
+			var allErrs error
+
+			out := LifecyclePreWriteResp{
+				Headers: make(map[string][]byte),
+			}
+			for _, h := range hooks {
+				if h.PreWrite != nil {
+					var err error
+
+					resp, err := h.PreWrite(ctx, meta)
+					if err != nil {
+						allErrs = errors.Join(allErrs, err)
+					}
+					for k, v := range resp.Headers {
+						out.Headers[k] = v
+					}
+				}
+			}
+
+			return out, allErrs
+		},
+		PostFanout: func(ctx context.Context) {
+			for _, h := range hooks {
+				if h.PostFanout != nil {
+					h.PostFanout(ctx)
+				}
+			}
+		},
 	}
 }
diff --git a/reader.go b/reader.go
index f2197e8..d203979 100644
--- a/reader.go
+++ b/reader.go
@@ -71,7 +71,10 @@ func newReader(args readerArgs) (*KReader, error) {
 	formatter := args.f
 	logger := args.l
 
-	confluentConfig := makeConsumerConfig(conf, topicConfig, prefix)
+	confluentConfig, err := makeConsumerConfig(conf, topicConfig, prefix)
+	if err != nil {
+		return nil, err
+	}
 	consumer, err := provider(confluentConfig)
 	if err != nil {
 		return nil, err
diff --git a/reader_test.go b/reader_test.go
index 9cbe688..52bb81b 100644
--- a/reader_test.go
+++ b/reader_test.go
@@ -32,7 +32,7 @@ func TestReader_Read_NilReturn(t *testing.T) {
 		c: mockConsumer,
 	}.NewConsumer
 	args := readerArgs{
-		cfg:              Config{},
+		cfg:              Config{BootstrapServers: []string{"localhost:9092"}},
 		cCfg:             topicConfig,
 		consumerProvider: m,
 		l:                &NoopLogger{},
@@ -71,8 +71,7 @@ func TestReader_Read(t *testing.T) {
 	require.NoError(t, err)
 
 	args := readerArgs{
-		cfg:              Config{},
-		cCfg:             topicConfig,
+		cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig,
 		consumerProvider: m,
 		l:                &NoopLogger{},
 		f:                f,
@@ -115,7 +114,7 @@ func TestReader_Read_Error(t *testing.T) {
 		c: mockConsumer,
 	}.NewConsumer
 	args := readerArgs{
-		cfg:              Config{},
+		cfg:              Config{BootstrapServers: []string{"localhost:9092"}},
 		cCfg:             topicConfig,
 		consumerProvider: m,
 		l:                &NoopLogger{},
@@ -154,7 +153,7 @@ func TestReader_Read_TimeoutError(t *testing.T) {
 		c: mockConsumer,
 	}.NewConsumer
 	args := readerArgs{
-		cfg:              Config{},
+		cfg:              Config{BootstrapServers: []string{"localhost:9092"}},
 		cCfg:             topicConfig,
consumerProvider: m, l: &NoopLogger{}, @@ -180,8 +179,7 @@ func TestReader_Read_SubscriberError(t *testing.T) { c: mockConsumer, }.NewConsumer args := readerArgs{ - cfg: Config{}, - cCfg: topicConfig, + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, consumerProvider: m, l: &NoopLogger{}, } @@ -207,8 +205,7 @@ func TestReader_Read_CloseError(t *testing.T) { c: mockConsumer, }.NewConsumer args := readerArgs{ - cfg: Config{}, - cCfg: topicConfig, + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, consumerProvider: m, l: &l, } @@ -234,8 +231,7 @@ func TestReader_ReadWhenConnectionIsClosed(t *testing.T) { c: mockConsumer, }.NewConsumer args := readerArgs{ - cfg: Config{}, - cCfg: topicConfig, + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, consumerProvider: m, l: &NoopLogger{}, } @@ -262,6 +258,7 @@ func Test_newReader(t *testing.T) { { name: "custom formatter, no error. It is implied that user will supply formatter later", args: args{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, topicConfig: ConsumerTopicConfig{ Formatter: zfmt.FormatterType("custom"), }, @@ -269,16 +266,6 @@ func Test_newReader(t *testing.T) { }, wantErr: false, }, - //{ - // name: "invalid formatter", - // args: args{ - // consumeProvider: defaultConfluentConsumerProvider{}.NewConsumer, - // topicConfig: ConsumerTopicConfig{ - // Formatter: zfmt.FormatterType("invalid_fmt"), - // }, - // }, - // wantErr: true, - //}, { name: "valid formatter but has error when creating NewConsumer", args: args{ @@ -289,6 +276,7 @@ func Test_newReader(t *testing.T) { { name: "minimum config with formatter", args: args{ + conf: Config{BootstrapServers: []string{"localhost:9092"}}, consumeProvider: defaultConfluentConsumerProvider{}.NewConsumer, topicConfig: ConsumerTopicConfig{ Formatter: zfmt.StringFmt, @@ -340,7 +328,7 @@ func Test_ProcessMessage(t *testing.T) { c: mock_confluent.NewMockKafkaConsumer(ctrl), }.NewConsumer args := readerArgs{ - cfg: Config{}, + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, consumerProvider: m, l: &NoopLogger{}, @@ -380,6 +368,7 @@ func Test_ProcessMultipleMessagesFromDifferentTopics_UpdatesInternalStateProperl }.NewConsumer args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, consumerProvider: m, l: &l, @@ -427,6 +416,7 @@ func Test_ProcessMessage_StoreOffsetError(t *testing.T) { c: mockConsumer, }.NewConsumer args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, consumerProvider: m, l: &l, @@ -480,6 +470,7 @@ func Test_ProcessMessage_SetError(t *testing.T) { c: mockConsumer, }.NewConsumer args := readerArgs{ + cfg: Config{BootstrapServers: []string{"localhost:9092"}}, cCfg: topicConfig, consumerProvider: m, l: &l, diff --git a/test/integration_test.go b/test/integration_test.go index 49f60ee..2e8c73e 100644 --- a/test/integration_test.go +++ b/test/integration_test.go @@ -1336,6 +1336,49 @@ func Test_DeadletterClientDoesntCollideWithProducer(t *testing.T) { require.Equal(t, int64(1), dltCount.Load(), "Expected a message to be written to the DLT topic") } +func Test_MissingBootstrap_ShouldGiveClearError(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + + groupID := uuid.NewString() + + client := 
zkafka.NewClient(zkafka.Config{BootstrapServers: []string{}}, + zkafka.LoggerOption(stdLogger{}), + ) + defer func() { require.NoError(t, client.Close()) }() + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + ctx, cancel := context.WithTimeout(context.Background(), 100*time.Second) + defer cancel() + + var readErr error + wf := zkafka.NewWorkFactory(client, zkafka.WithLogger(stdLogger{}), + zkafka.WithWorkLifecycleHooks(zkafka.LifecycleHooks{ + PostReadImmediate: func(ctx context.Context, meta zkafka.LifecyclePostReadImmediateMeta) { + readErr = meta.Err + cancel() + }, + }), + ) + w := wf.CreateWithFunc(consumerTopicConfig, func(_ context.Context, msg *zkafka.Message) error { + return nil + }) + err := w.Run(context.Background(), ctx.Done()) + require.NoError(t, err) + require.ErrorContains(t, readErr, "invalid config, missing bootstrap server addresses") +} + func createTopic(t *testing.T, bootstrapServer, topic string, partitions int) { t.Helper() aclient, err := kafka.NewAdminClient(&kafka.ConfigMap{"bootstrap.servers": bootstrapServer}) diff --git a/test/worker_test.go b/test/worker_test.go index 0d64521..08d4758 100644 --- a/test/worker_test.go +++ b/test/worker_test.go @@ -2060,17 +2060,6 @@ func TestWork_ShutdownCausesRunExit(t *testing.T) { require.NoError(t, err) } -// func Test_DeadeletterClientDoesntCollidWithProducer(t *testing.T) { -// // work with deadletter clientid=123 -// // producer clientid=123 -// // This is a classic pattern. connecetor style. Shouldn't have issues with writes -// t.Fail() -// } - -func Test_MissingBootstrap_ShouldGiveClearError(t *testing.T) { - t.Fail() -} - func Test_FailedAuthentication(t *testing.T) { // not sure if we can even do this with the local broker // See if can require username/password for dynamically provisioned topic diff --git a/work.go b/work.go index a0e1c93..7bfdce9 100644 --- a/work.go +++ b/work.go @@ -204,6 +204,14 @@ func (w *Work) fanOut(ctx context.Context, shutdown <-chan struct{}) { return } msg, err := w.readMessage(ctx, shutdown) + + if w.lifecycle.PostReadImmediate != nil { + w.lifecycle.PostReadImmediate(ctx, LifecyclePostReadImmediateMeta{ + Message: msg, + Err: err, + }) + } + if err != nil { w.logger.Warnw(ctx, "Kafka worker read message failed", "error", err, From 837a16e4dc04c7d441c3a981c95295d3152eb481 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 13:54:57 -0700 Subject: [PATCH 20/34] Updated --- client.go | 13 +++++++++---- client_test.go | 29 +++++++++++++++++------------ config.go | 2 +- test/worker_test.go | 7 ------- work.go | 2 +- 5 files changed, 28 insertions(+), 25 deletions(-) diff --git a/client.go b/client.go index d2908ae..bb0bbcc 100644 --- a/client.go +++ b/client.go @@ -113,8 +113,9 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig, opts if err != nil { return nil, err } + writerKey := getWriterKey(topicConfig) c.mu.RLock() - w, exist := c.writers[topicConfig.ClientID] + w, exist := c.writers[writerKey] if exist && !w.isClosed { c.mu.RUnlock() return w, nil @@ -123,7 +124,7 @@ func (c *Client) Writer(_ context.Context, topicConfig ProducerTopicConfig, opts c.mu.Lock() defer c.mu.Unlock() - w, exist = c.writers[topicConfig.ClientID] + w, exist = c.writers[writerKey] if exist && !w.isClosed { return w, nil } @@ -151,8 +152,8 @@ func (c *Client) Writer(_ 
context.Context, topicConfig ProducerTopicConfig, opts return nil, err } - c.writers[topicConfig.ClientID] = writer - return c.writers[topicConfig.ClientID], nil + c.writers[writerKey] = writer + return c.writers[writerKey], nil } // Close terminates all cached readers and writers gracefully. @@ -218,3 +219,7 @@ func getTracer(tp trace.TracerProvider) trace.Tracer { } return tp.Tracer(instrumentationName, trace.WithInstrumentationVersion("v1.0.0")) } + +func getWriterKey(cfg ProducerTopicConfig) string { + return cfg.ClientID + "-" + cfg.Topic +} diff --git a/client_test.go b/client_test.go index ce6c0af..144e083 100644 --- a/client_test.go +++ b/client_test.go @@ -410,7 +410,7 @@ func TestClient_Writer(t *testing.T) { name: "get from cache", fields: fields{ writers: map[string]*KWriter{ - "test-id": &KWriter{}, + "test-id-topic": &KWriter{}, }, }, args: args{ @@ -1020,9 +1020,7 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", - Topic: "", - Formatter: "", - SchemaID: 0, + Topic: "yyy", Transaction: true, }, }, @@ -1031,7 +1029,7 @@ func Test_makeConfig_Producer(t *testing.T) { "enable.idempotence": true, "request.required.acks": -1, "max.in.flight.requests.per.connection": 1, - "client.id": "clientid", + "client.id": "clientid-yyy", "linger.ms": 0, }, }, @@ -1043,12 +1041,13 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "zzz", DeliveryTimeoutMs: ptr(100), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080,https://localhost:8081", - "client.id": "clientid", + "client.id": "clientid-zzz", "delivery.timeout.ms": 100, "enable.idempotence": true, "linger.ms": 0, @@ -1062,6 +1061,7 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "zzz", DeliveryTimeoutMs: ptr(100), AdditionalProps: map[string]any{ "stewarts.random.property.not.included.in.topicconfig": 123, @@ -1071,7 +1071,7 @@ func Test_makeConfig_Producer(t *testing.T) { want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", "enable.idempotence": true, - "client.id": "clientid", + "client.id": "clientid-zzz", "delivery.timeout.ms": 100, "stewarts.random.property.not.included.in.topicconfig": 123, "linger.ms": 0, @@ -1087,6 +1087,7 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "abc", DeliveryTimeoutMs: ptr(100), EnableIdempotence: ptr(false), RequestRequiredAcks: ptr("all"), @@ -1098,7 +1099,7 @@ func Test_makeConfig_Producer(t *testing.T) { }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-abc", "enable.idempotence": false, "delivery.timeout.ms": 100, "auto.commit.interval.ms": 20, @@ -1120,12 +1121,13 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "xxx", SaslUsername: ptr(""), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-xxx", "enable.idempotence": true, "linger.ms": 0, }, @@ -1140,12 +1142,13 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "xxx", SaslUsername: ptr("usernameOverride"), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-xxx", "enable.idempotence": true, 
"sasl.mechanism": "SCRAM-SHA-256", "sasl.password": "password", @@ -1164,12 +1167,13 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "xxx", SaslPassword: ptr("passwordOverride"), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-xxx", "enable.idempotence": true, "sasl.mechanism": "SCRAM-SHA-256", "sasl.password": "passwordOverride", @@ -1188,13 +1192,14 @@ func Test_makeConfig_Producer(t *testing.T) { }, topicConfig: ProducerTopicConfig{ ClientID: "clientid", + Topic: "xxx", SaslUsername: ptr("usernameOverride"), SaslPassword: ptr("passwordOverride"), }, }, want: kafka.ConfigMap{ "bootstrap.servers": "http://localhost:8080", - "client.id": "clientid", + "client.id": "clientid-xxx", "enable.idempotence": true, "sasl.mechanism": "SCRAM-SHA-256", "sasl.password": "passwordOverride", diff --git a/config.go b/config.go index 7c21684..dace81a 100644 --- a/config.go +++ b/config.go @@ -365,7 +365,7 @@ func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix str func makeProducerConfig(conf Config, topicConfig ProducerTopicConfig) kafka.ConfigMap { configMap := kafka.ConfigMap{} - configMap[clientID] = topicConfig.ClientID + configMap[clientID] = getWriterKey(topicConfig) if topicConfig.RequestRequiredAcks != nil { configMap[requestRequiredAcks] = *topicConfig.RequestRequiredAcks diff --git a/test/worker_test.go b/test/worker_test.go index 08d4758..d1334c9 100644 --- a/test/worker_test.go +++ b/test/worker_test.go @@ -2060,13 +2060,6 @@ func TestWork_ShutdownCausesRunExit(t *testing.T) { require.NoError(t, err) } -func Test_FailedAuthentication(t *testing.T) { - // not sure if we can even do this with the local broker - // See if can require username/password for dynamically provisioned topic - t.Fail() - -} - // $ go test -run=XXX -bench=BenchmarkWork_Run_CircuitBreaker_BusyLoopBreaker -cpuprofile profile_cpu.out // $ go tool pprof --web profile_cpu.out // $ go tool pprof -http=":8000" test.test ./profile_cpu.out diff --git a/work.go b/work.go index 7bfdce9..b676bc9 100644 --- a/work.go +++ b/work.go @@ -672,7 +672,7 @@ func (f WorkFactory) Create(topicConfig ConsumerTopicConfig, processor processor if topicConfig.DeadLetterTopicConfig != nil { cfg := *topicConfig.DeadLetterTopicConfig if cfg.ClientID == "" { - cfg.ClientID = topicConfig.ClientID + "-auto-deadletter-publisher" + cfg.ClientID = topicConfig.ClientID } options = append(options, WithDeadLetterTopic(cfg)) } From 4b9c18410711d237120426997912541614f50d25 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 17:47:58 -0700 Subject: [PATCH 21/34] Fixed linter --- .golangci.yml | 11 +++++------ schemareg.go | 5 ++++- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index 33a5523..29a8677 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,11 +1,10 @@ run: - skip-dirs: - - docs - - datadog - - kustomize - skip-files: - - 'wire_gen.go' tests: false +issues: + exclude-files: + - 'wire_gen.go' + exclude-dirs: + - docs linters-settings: errcheck: check-type-assertions: true diff --git a/schemareg.go b/schemareg.go index f6bc3bc..dbca923 100644 --- a/schemareg.go +++ b/schemareg.go @@ -13,7 +13,7 @@ import ( ) type schemaRegistryFactory struct { - mmu sync.Mutex + m sync.Mutex srCls map[string]schemaregistry.Client } @@ -104,6 +104,9 @@ func (c *schemaRegistryFactory) createJson(srConfig SchemaRegistryConfig) (jsonF } func 
(c *schemaRegistryFactory) getSchemaClient(srConfig SchemaRegistryConfig) (schemaregistry.Client, error) { + c.m.Lock() + defer c.m.Unlock() + url := srConfig.URL if url == "" { return nil, errors.New("no schema registry url provided") From ac8385152fb0e04a5349478041ded7526c47072c Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 19:18:17 -0700 Subject: [PATCH 22/34] Fixed linter warnings --- .golangci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index 29a8677..bc3b462 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,5 +1,6 @@ run: tests: false + go: '1.22' issues: exclude-files: - 'wire_gen.go' @@ -13,8 +14,6 @@ linters-settings: sections: - standard - default - gosimple: - go: '1.17' depguard: rules: Main: From 8ac8970597d877df2e1f36a986c1bdb13096ecb6 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 19:49:21 -0700 Subject: [PATCH 23/34] Added lifecycle tests --- lifecycle_test.go | 160 ++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 142 insertions(+), 18 deletions(-) diff --git a/lifecycle_test.go b/lifecycle_test.go index 2ba609e..180585e 100644 --- a/lifecycle_test.go +++ b/lifecycle_test.go @@ -11,6 +11,13 @@ func Test_LifecycleChainedHooksAreCalled(t *testing.T) { lhState := make(map[string]int) // Map from state to number of times called hooks1 := LifecycleHooks{ + PostRead: func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) { + lhState["hooks1-post-read"] += 1 + return ctx, nil + }, + PostReadImmediate: func(ctx context.Context, meta LifecyclePostReadImmediateMeta) { + lhState["hooks1-post-read-immediate"] += 1 + }, PreProcessing: func(ctx context.Context, meta LifecyclePreProcessingMeta) (context.Context, error) { lhState["hooks1-pre-processing"] += 1 return ctx, nil @@ -23,9 +30,23 @@ func Test_LifecycleChainedHooksAreCalled(t *testing.T) { lhState["hooks1-post-ack"] += 1 return nil }, + PreWrite: func(ctx context.Context, meta LifecyclePreWriteMeta) (LifecyclePreWriteResp, error) { + lhState["hooks1-pre-write"] += 1 + return LifecyclePreWriteResp{}, nil + }, + PostFanout: func(ctx context.Context) { + lhState["hooks1-post-fanout"] += 1 + }, } hooks2 := LifecycleHooks{ + PostRead: func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) { + lhState["hooks2-post-read"] += 1 + return ctx, nil + }, + PostReadImmediate: func(ctx context.Context, meta LifecyclePostReadImmediateMeta) { + lhState["hooks2-post-read-immediate"] += 1 + }, PreProcessing: func(ctx context.Context, meta LifecyclePreProcessingMeta) (context.Context, error) { lhState["hooks2-pre-processing"] += 1 return ctx, nil @@ -38,31 +59,134 @@ func Test_LifecycleChainedHooksAreCalled(t *testing.T) { lhState["hooks2-post-ack"] += 1 return nil }, + PreWrite: func(ctx context.Context, meta LifecyclePreWriteMeta) (LifecyclePreWriteResp, error) { + lhState["hooks2-pre-write"] += 1 + return LifecyclePreWriteResp{}, nil + }, + PostFanout: func(ctx context.Context) { + lhState["hooks2-post-fanout"] += 1 + }, } lh := ChainLifecycleHooks(hooks1, hooks2) lh.PreProcessing(context.Background(), LifecyclePreProcessingMeta{}) - require.Equal(t, 1, lhState["hooks1-pre-processing"], "hooks1-pre-processing not called") - require.Equal(t, 1, lhState["hooks2-pre-processing"], "hooks2-pre-processing not called") - require.Equal(t, 0, lhState["hooks1-post-processing"], "hooks1-post-processing called") - require.Equal(t, 0, lhState["hooks2-post-processing"], "hooks2-post-processing 
called") - require.Equal(t, 0, lhState["hooks1-post-ack"], "hooks1-post-ack called") - require.Equal(t, 0, lhState["hooks2-post-ack"], "hooks2-post-ack called") + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 0, lhState["hooks1-post-processing"]) + require.Equal(t, 0, lhState["hooks2-post-processing"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-post-read"]) + require.Equal(t, 0, lhState["hooks2-post-read"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-pre-write"]) + require.Equal(t, 0, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) lh.PostProcessing(context.Background(), LifecyclePostProcessingMeta{}) - require.Equal(t, 1, lhState["hooks1-pre-processing"], "hooks1-pre-processing not called") - require.Equal(t, 1, lhState["hooks2-pre-processing"], "hooks2-pre-processing not called") - require.Equal(t, 1, lhState["hooks1-post-processing"], "hooks1-post-processing not called") - require.Equal(t, 1, lhState["hooks2-post-processing"], "hooks2-post-processing not called") - require.Equal(t, 0, lhState["hooks1-post-ack"], "hooks1-post-ack called") - require.Equal(t, 0, lhState["hooks2-post-ack"], "hooks2-post-ack called") + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-post-read"]) + require.Equal(t, 0, lhState["hooks2-post-read"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-pre-write"]) + require.Equal(t, 0, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) + + lh.PostRead(context.Background(), LifecyclePostReadMeta{}) + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-post-ack"]) + require.Equal(t, 0, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-pre-write"]) + require.Equal(t, 0, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) lh.PostAck(context.Background(), LifecyclePostAckMeta{}) - require.Equal(t, 1, lhState["hooks1-pre-processing"], "hooks1-pre-processing not called") - 
require.Equal(t, 1, lhState["hooks2-pre-processing"], "hooks2-pre-processing not called") - require.Equal(t, 1, lhState["hooks1-post-processing"], "hooks1-post-processing not called") - require.Equal(t, 1, lhState["hooks2-post-processing"], "hooks2-post-processing not called") - require.Equal(t, 1, lhState["hooks1-post-ack"], "hooks1-post-ack not called") - require.Equal(t, 1, lhState["hooks2-post-ack"], "hooks2-post-ack not called") + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + require.Equal(t, 0, lhState["hooks1-pre-write"]) + require.Equal(t, 0, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) + + lh.PreWrite(context.Background(), LifecyclePreWriteMeta{}) + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + require.Equal(t, 1, lhState["hooks1-pre-write"]) + require.Equal(t, 1, lhState["hooks2-pre-write"]) + require.Equal(t, 0, lhState["hooks1-post-fanout"]) + require.Equal(t, 0, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) + + lh.PostFanout(context.Background()) + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + require.Equal(t, 1, lhState["hooks1-pre-write"]) + require.Equal(t, 1, lhState["hooks2-pre-write"]) + require.Equal(t, 1, lhState["hooks1-post-fanout"]) + require.Equal(t, 1, lhState["hooks2-post-fanout"]) + require.Equal(t, 0, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 0, lhState["hooks2-post-read-immediate"]) + + lh.PostReadImmediate(context.Background(), LifecyclePostReadImmediateMeta{}) + require.Equal(t, 1, lhState["hooks1-pre-processing"]) + require.Equal(t, 1, lhState["hooks2-pre-processing"]) + require.Equal(t, 1, lhState["hooks1-post-processing"]) + require.Equal(t, 1, lhState["hooks2-post-processing"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + require.Equal(t, 1, lhState["hooks1-post-read"]) + require.Equal(t, 1, lhState["hooks2-post-read"]) + require.Equal(t, 1, lhState["hooks1-post-ack"]) + require.Equal(t, 1, lhState["hooks2-post-ack"]) + require.Equal(t, 1, lhState["hooks1-pre-write"]) + require.Equal(t, 1, lhState["hooks2-pre-write"]) + require.Equal(t, 1, 
lhState["hooks1-post-fanout"]) + require.Equal(t, 1, lhState["hooks2-post-fanout"]) + require.Equal(t, 1, lhState["hooks1-post-read-immediate"]) + require.Equal(t, 1, lhState["hooks2-post-read-immediate"]) } From 9412096f71e412c233f598623a6ff733b858e96a Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 19:51:57 -0700 Subject: [PATCH 24/34] Added tests which use Proto/JSON schema registry that run in pipeline --- test/schema_registry_test.go | 224 ++++++++++++++++++++++++++++++++++- 1 file changed, 223 insertions(+), 1 deletion(-) diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go index 9f498b7..0a9438e 100644 --- a/test/schema_registry_test.go +++ b/test/schema_registry_test.go @@ -8,12 +8,17 @@ import ( "os" "testing" + "github.com/google/go-cmp/cmp/cmpopts" "github.com/google/uuid" "github.com/stretchr/testify/require" "github.com/zillow/zfmt" "github.com/zillow/zkafka" "github.com/zillow/zkafka/test/evolution/avro1" "github.com/zillow/zkafka/test/evolution/avro2" + "github.com/zillow/zkafka/test/evolution/json1" + "github.com/zillow/zkafka/test/evolution/json2" + "github.com/zillow/zkafka/test/evolution/proto1" + "github.com/zillow/zkafka/test/evolution/proto2" ) //go:embed evolution/schema_1.avsc @@ -25,7 +30,7 @@ var dummyEventSchema2 string const enableSchemaRegistryTest = "ENABLE_SCHEMA_REGISTRY_TESTS" const enableKafkaBrokerTest = "ENABLE_KAFKA_BROKER_TESTS" -func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { +func Test_SchemaRegistry_Avro_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { checkShouldSkipTest(t, enableKafkaBrokerTest) ctx := context.Background() @@ -136,6 +141,223 @@ func Test_SchemaRegistry_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegis assertEqual(t, evt2, receivedEvt2Schema2) } +func Test_SchemaRegistry_Proto_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + evt1 := proto1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + 
evt2 := proto2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: uuid.NewString(), + } + _, err = writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := proto1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema1 := proto1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := proto1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema2 := proto2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(proto2.DummyEvent{})) +} + +func Test_SchemaRegistry_JSON_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + evt1 := json1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + evt2 := json2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: uuid.NewString(), + } + _, err = 
writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := json1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) + + receivedEvt2Schema1 := json1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := json1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) + + receivedEvt2Schema2 := json2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(json2.DummyEvent{})) +} + func Test_SchemaRegistry_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing.T) { checkShouldSkipTest(t, enableKafkaBrokerTest) From a4e45358214bea3c986f009140095b70f037f49c Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 20:03:18 -0700 Subject: [PATCH 25/34] Added some comments --- lifecycle.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lifecycle.go b/lifecycle.go index 50bd273..86156e2 100644 --- a/lifecycle.go +++ b/lifecycle.go @@ -57,6 +57,8 @@ type LifecycleHooks struct { // Called by work after reading a message (guaranteed non nil), offers the ability to customize the context object (resulting context object passed to work processor) PostRead func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) + // Called by work immediately after an attempt to read a message. Msg might be nil if there was an error + // or no message was available. PostReadImmediate func(ctx context.Context, meta LifecyclePostReadImmediateMeta) // Called after receiving a message and before processing it.
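To make the new hook concrete, here is a minimal sketch of how a consumer might observe read attempts with PostReadImmediate. Only types shown in this series are used; attaching the hooks to a work factory is elided, since that wiring option isn't part of this patch.

package main

import (
	"context"
	"log"

	"github.com/zillow/zkafka"
)

func main() {
	// Sketch only: hooks that log every read attempt. Per the doc comment added
	// in PATCH 25, the hook fires even when no message was available, so any
	// message access inside the callback must be nil-safe.
	hooks := zkafka.LifecycleHooks{
		PostReadImmediate: func(ctx context.Context, meta zkafka.LifecyclePostReadImmediateMeta) {
			log.Println("kafka read attempted")
		},
	}
	_ = hooks // wiring into a work factory is elided here
}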
From ac2ca688816f66bffd2e105c1c66fb7c6986fca6 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 20:41:16 -0700 Subject: [PATCH 26/34] Updated some stuff --- client.go | 6 + coverage.sh | 6 +- formatter.go | 29 +-- message.go | 2 +- test/schema_registry_evo_test.go | 331 ++++++++++++++++++++++++++++++ test/schema_registry_test.go | 334 ------------------------------- test/worker_test.go | 13 +- testhelper.go | 3 + work.go | 2 +- work_test.go | 21 +- 10 files changed, 368 insertions(+), 379 deletions(-) diff --git a/client.go b/client.go index bb0bbcc..6e44ee9 100644 --- a/client.go +++ b/client.go @@ -223,3 +223,9 @@ func getTracer(tp trace.TracerProvider) trace.Tracer { func getWriterKey(cfg ProducerTopicConfig) string { return cfg.ClientID + "-" + cfg.Topic } + +type formatterArgs struct { + formatter zfmt.FormatterType + schemaID int + srCfg SchemaRegistryConfig +} diff --git a/coverage.sh b/coverage.sh index b1a748e..4485c4b 100755 --- a/coverage.sh +++ b/coverage.sh @@ -27,11 +27,11 @@ function quit() { } # change to example directory for execution (because it uses hardcoded filepaths, and the testable # examples don't work when executed outside of that directory -go test -c -coverpkg=$pck1 -covermode=atomic -o "$root_res" $pck1 +go test --tags=evolution_test -c -coverpkg=$pck1 -covermode=atomic -o "$root_res" $pck1 # convert binary to go formatted -go tool test2json -t "$root_res" -test.v -test.coverprofile "$root_out" +go tool test2json -t "$root_res" -test.v -test.coverprofile "$root_out" -go test -c -coverpkg=$pck1 -covermode=atomic -o "$source_res" $pck2 +go test --tags=evolution_test -c -coverpkg=$pck1 -covermode=atomic -o "$source_res" $pck2 go tool test2json -t "$source_res" -test.v -test.coverprofile "$source_out" # delete aggregate file diff --git a/formatter.go b/formatter.go index 2489652..5c95692 100644 --- a/formatter.go +++ b/formatter.go @@ -33,27 +33,38 @@ type Formatter interface { } type marshReq struct { + // topic is the kafka topic being written to topic string // subject is the data to be marshalled subject any - // schema + // schema is currently only used for avro schematizations. It is necessary + // because the confluent implementation reflects on the subject to get the schema to use when + // communicating with schema-registry, and backward compatible evolutions fail because of data loss during reflection. + // For example, if a field has a default value, the reflection doesn't pick this up schema string } type unmarshReq struct { - topic string - data []byte + // topic is the kafka topic being read from + topic string + // data is the message value which will be unmarshalled to a type + data []byte + // target is the struct which is to be hydrated by the contents of data target any } +var _ kFormatter = (*avroSchemaRegistryFormatter)(nil) +var _ kFormatter = (*zfmtShim)(nil) + +// kFormatter is zkafka's special formatter. +// It extends the zfmt options and works with schema registry.
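+// Implementations marshal payloads being written to kafka and unmarshal payloads read from it; +// zfmt formatters satisfy it through the zfmtShim adapter, while schema-registry aware formatters +// such as avroSchemaRegistryFormatter implement it directly.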
type kFormatter interface { marshall(req marshReq) ([]byte, error) unmarshal(req unmarshReq) error } -var _ kFormatter = (*avroSchemaRegistryFormatter)(nil) -var _ kFormatter = (*zfmtShim)(nil) - +// zfmtShim is a shim type which allows +// zfmt formatters to work with the kFormatter type zfmtShim struct { F zfmt.Formatter } @@ -66,12 +77,6 @@ func (f zfmtShim) unmarshal(req unmarshReq) error { return f.F.Unmarshal(req.data, req.target) } -type formatterArgs struct { - formatter zfmt.FormatterType - schemaID int - srCfg SchemaRegistryConfig -} - // errFormatter is a formatter that returns error when called. The error will remind the user // to provide appropriate implementation type errFormatter struct{} diff --git a/message.go b/message.go index 5e2850c..6486ebd 100644 --- a/message.go +++ b/message.go @@ -57,7 +57,7 @@ func (m *Message) Decode(v any) error { func (m *Message) unmarshall(target any) error { if m.fmt == nil { - return errors.New("formatter or confluent formatter is not supplied to decode kafka message") + return errors.New("formatter is not supplied to decode kafka message") } return m.fmt.unmarshal(unmarshReq{ topic: m.Topic, diff --git a/test/schema_registry_evo_test.go b/test/schema_registry_evo_test.go index f42e02a..5a82e08 100644 --- a/test/schema_registry_evo_test.go +++ b/test/schema_registry_evo_test.go @@ -1,3 +1,6 @@ +// Build tag is added here because the proto evolution test creates a package loading runtime error. +// In the pipeline, this error is suppressed with an envvar. However, this repo wants to remain idiomatic +// and devs should be able to run `go test ./...` without the package loading runtime error. //go:build evolution_test // +build evolution_test @@ -367,3 +370,331 @@ func Test_SchemaRegistryReal_JSON_AutoRegisterSchemas_BackwardCompatibleSchemasC require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(json2.DummyEvent{})) } + +func Test_SchemaRegistry_Avro_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema1, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + Schema: dummyEventSchema2, + }, + }, + }) + require.NoError(t, err) + + evt1 := avro1.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true,
+ BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, evt1) + require.NoError(t, err) + + evt2 := avro2.DummyEvent{ + IntField: int(rand.Int31()), + DoubleField: rand.Float64(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewFieldWithDefault: ptr(uuid.NewString()), + } + _, err = writer2.Write(ctx, evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.AvroSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := avro1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1) + + receivedEvt2Schema1 := avro1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := avro1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1) + + receivedEvt2Schema2 := avro2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2) +} + +func Test_SchemaRegistry_Proto_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + evt1 := proto1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + // write msg1, and msg2 + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + evt2 := proto2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: 
uuid.NewString(), + } + _, err = writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.ProtoSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := proto1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema1 := proto1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := proto1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) + + receivedEvt2Schema2 := proto2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(proto2.DummyEvent{})) +} + +func Test_SchemaRegistry_JSON_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { + checkShouldSkipTest(t, enableKafkaBrokerTest) + + ctx := context.Background() + topic := "integration-test-topic-2" + uuid.NewString() + bootstrapServer := getBootstrap() + + createTopic(t, bootstrapServer, topic, 1) + t.Logf("Created topic: %s", topic) + + groupID := uuid.NewString() + + client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) + defer func() { require.NoError(t, client.Close()) }() + + t.Log("Created writer with auto registered schemas") + writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ + ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + Serialization: zkafka.SerializationConfig{ + AutoRegisterSchemas: true, + }, + }, + }) + require.NoError(t, err) + + evt1 := json1.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + } + _, err = writer1.Write(ctx, &evt1) + require.NoError(t, err) + + evt2 := json2.DummyEvent{ + IntField: rand.Int63(), + DoubleField: rand.Float32(), + StringField: uuid.NewString(), + BoolField: true, + BytesField: []byte(uuid.NewString()), + NewField: uuid.NewString(), + } + _, err = writer2.Write(ctx, &evt2) + require.NoError(t, err) + + consumerTopicConfig := zkafka.ConsumerTopicConfig{ + ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), + Topic: topic, + 
Formatter: zkafka.JSONSchemaRegistry, + SchemaRegistry: zkafka.SchemaRegistryConfig{ + URL: "mock://", + }, + GroupID: groupID, + AdditionalProps: map[string]any{ + "auto.offset.reset": "earliest", + }, + } + reader, err := client.Reader(ctx, consumerTopicConfig) + require.NoError(t, err) + + t.Log("Begin reading messages") + results, err := readMessages(reader, 2) + require.NoError(t, err) + + msg1 := <-results + msg2 := <-results + t.Log("Close reader") + + require.NoError(t, reader.Close()) + + receivedEvt1 := json1.DummyEvent{} + require.NoError(t, msg1.Decode(&receivedEvt1)) + assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) + + receivedEvt2Schema1 := json1.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) + expectedEvt2 := json1.DummyEvent{ + IntField: evt2.IntField, + DoubleField: evt2.DoubleField, + StringField: evt2.StringField, + BoolField: evt2.BoolField, + BytesField: evt2.BytesField, + } + assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) + + receivedEvt2Schema2 := json2.DummyEvent{} + require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) + assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(json2.DummyEvent{})) +} diff --git a/test/schema_registry_test.go b/test/schema_registry_test.go index 0a9438e..3d4e3ca 100644 --- a/test/schema_registry_test.go +++ b/test/schema_registry_test.go @@ -8,17 +8,11 @@ import ( "os" "testing" - "github.com/google/go-cmp/cmp/cmpopts" "github.com/google/uuid" "github.com/stretchr/testify/require" "github.com/zillow/zfmt" "github.com/zillow/zkafka" "github.com/zillow/zkafka/test/evolution/avro1" - "github.com/zillow/zkafka/test/evolution/avro2" - "github.com/zillow/zkafka/test/evolution/json1" - "github.com/zillow/zkafka/test/evolution/json2" - "github.com/zillow/zkafka/test/evolution/proto1" - "github.com/zillow/zkafka/test/evolution/proto2" ) //go:embed evolution/schema_1.avsc @@ -30,334 +24,6 @@ var dummyEventSchema2 string const enableSchemaRegistryTest = "ENABLE_SCHEMA_REGISTRY_TESTS" const enableKafkaBrokerTest = "ENABLE_KAFKA_BROKER_TESTS" -func Test_SchemaRegistry_Avro_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { - checkShouldSkipTest(t, enableKafkaBrokerTest) - - ctx := context.Background() - topic := "integration-test-topic-2" + uuid.NewString() - bootstrapServer := getBootstrap() - - createTopic(t, bootstrapServer, topic, 1) - t.Logf("Created topic: %s", topic) - - groupID := uuid.NewString() - - client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) - defer func() { require.NoError(t, client.Close()) }() - - t.Log("Created writer with auto registered schemas") - writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ - ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.AvroSchemaRegistry, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "mock://", - Serialization: zkafka.SerializationConfig{ - AutoRegisterSchemas: true, - Schema: dummyEventSchema1, - }, - }, - }) - require.NoError(t, err) - - writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ - ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.AvroSchemaRegistry, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "mock://", - Serialization: zkafka.SerializationConfig{ - AutoRegisterSchemas: true, - Schema: dummyEventSchema2, - }, - }, - 
}) - require.NoError(t, err) - - evt1 := avro1.DummyEvent{ - IntField: int(rand.Int31()), - DoubleField: rand.Float64(), - StringField: uuid.NewString(), - BoolField: true, - BytesField: []byte(uuid.NewString()), - } - // write msg1, and msg2 - _, err = writer1.Write(ctx, evt1) - require.NoError(t, err) - - evt2 := avro2.DummyEvent{ - IntField: int(rand.Int31()), - DoubleField: rand.Float64(), - StringField: uuid.NewString(), - BoolField: true, - BytesField: []byte(uuid.NewString()), - NewFieldWithDefault: ptr(uuid.NewString()), - } - _, err = writer2.Write(ctx, evt2) - require.NoError(t, err) - - consumerTopicConfig := zkafka.ConsumerTopicConfig{ - ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.AvroSchemaRegistry, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "mock://", - }, - GroupID: groupID, - AdditionalProps: map[string]any{ - "auto.offset.reset": "earliest", - }, - } - reader, err := client.Reader(ctx, consumerTopicConfig) - require.NoError(t, err) - - t.Log("Begin reading messages") - results, err := readMessages(reader, 2) - require.NoError(t, err) - - msg1 := <-results - msg2 := <-results - t.Log("Close reader") - - require.NoError(t, reader.Close()) - - receivedEvt1 := avro1.DummyEvent{} - require.NoError(t, msg1.Decode(&receivedEvt1)) - assertEqual(t, evt1, receivedEvt1) - - receivedEvt2Schema1 := avro1.DummyEvent{} - require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) - expectedEvt2 := avro1.DummyEvent{ - IntField: evt2.IntField, - DoubleField: evt2.DoubleField, - StringField: evt2.StringField, - BoolField: evt2.BoolField, - BytesField: evt2.BytesField, - } - assertEqual(t, expectedEvt2, receivedEvt2Schema1) - - receivedEvt2Schema2 := avro2.DummyEvent{} - require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) - assertEqual(t, evt2, receivedEvt2Schema2) -} - -func Test_SchemaRegistry_Proto_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { - checkShouldSkipTest(t, enableKafkaBrokerTest) - - ctx := context.Background() - topic := "integration-test-topic-2" + uuid.NewString() - bootstrapServer := getBootstrap() - - createTopic(t, bootstrapServer, topic, 1) - t.Logf("Created topic: %s", topic) - - groupID := uuid.NewString() - - client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) - defer func() { require.NoError(t, client.Close()) }() - - t.Log("Created writer with auto registered schemas") - writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ - ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.ProtoSchemaRegistry, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "mock://", - Serialization: zkafka.SerializationConfig{ - AutoRegisterSchemas: true, - }, - }, - }) - require.NoError(t, err) - - writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ - ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.ProtoSchemaRegistry, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "mock://", - Serialization: zkafka.SerializationConfig{ - AutoRegisterSchemas: true, - }, - }, - }) - require.NoError(t, err) - - evt1 := proto1.DummyEvent{ - IntField: rand.Int63(), - DoubleField: rand.Float32(), - StringField: uuid.NewString(), - BoolField: true, - BytesField: []byte(uuid.NewString()), - } - // write msg1, and msg2 - _, err = writer1.Write(ctx, &evt1) - require.NoError(t, err) - - evt2 := 
proto2.DummyEvent{ - IntField: rand.Int63(), - DoubleField: rand.Float32(), - StringField: uuid.NewString(), - BoolField: true, - BytesField: []byte(uuid.NewString()), - NewField: uuid.NewString(), - } - _, err = writer2.Write(ctx, &evt2) - require.NoError(t, err) - - consumerTopicConfig := zkafka.ConsumerTopicConfig{ - ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.ProtoSchemaRegistry, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "mock://", - }, - GroupID: groupID, - AdditionalProps: map[string]any{ - "auto.offset.reset": "earliest", - }, - } - reader, err := client.Reader(ctx, consumerTopicConfig) - require.NoError(t, err) - - t.Log("Begin reading messages") - results, err := readMessages(reader, 2) - require.NoError(t, err) - - msg1 := <-results - msg2 := <-results - t.Log("Close reader") - - require.NoError(t, reader.Close()) - - receivedEvt1 := proto1.DummyEvent{} - require.NoError(t, msg1.Decode(&receivedEvt1)) - assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) - - receivedEvt2Schema1 := proto1.DummyEvent{} - require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) - expectedEvt2 := proto1.DummyEvent{ - IntField: evt2.IntField, - DoubleField: evt2.DoubleField, - StringField: evt2.StringField, - BoolField: evt2.BoolField, - BytesField: evt2.BytesField, - } - assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(proto1.DummyEvent{})) - - receivedEvt2Schema2 := proto2.DummyEvent{} - require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) - assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(proto2.DummyEvent{})) -} - -func Test_SchemaRegistry_JSON_AutoRegisterSchemas_BackwardCompatibleSchemasCanBeRegisteredAndReadFrom(t *testing.T) { - checkShouldSkipTest(t, enableKafkaBrokerTest) - - ctx := context.Background() - topic := "integration-test-topic-2" + uuid.NewString() - bootstrapServer := getBootstrap() - - createTopic(t, bootstrapServer, topic, 1) - t.Logf("Created topic: %s", topic) - - groupID := uuid.NewString() - - client := zkafka.NewClient(zkafka.Config{BootstrapServers: []string{bootstrapServer}}, zkafka.LoggerOption(stdLogger{})) - defer func() { require.NoError(t, client.Close()) }() - - t.Log("Created writer with auto registered schemas") - writer1, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ - ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.JSONSchemaRegistry, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "mock://", - Serialization: zkafka.SerializationConfig{ - AutoRegisterSchemas: true, - }, - }, - }) - require.NoError(t, err) - - writer2, err := client.Writer(ctx, zkafka.ProducerTopicConfig{ - ClientID: fmt.Sprintf("writer-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.JSONSchemaRegistry, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "mock://", - Serialization: zkafka.SerializationConfig{ - AutoRegisterSchemas: true, - }, - }, - }) - require.NoError(t, err) - - evt1 := json1.DummyEvent{ - IntField: rand.Int63(), - DoubleField: rand.Float32(), - StringField: uuid.NewString(), - BoolField: true, - BytesField: []byte(uuid.NewString()), - } - _, err = writer1.Write(ctx, &evt1) - require.NoError(t, err) - - evt2 := json2.DummyEvent{ - IntField: rand.Int63(), - DoubleField: rand.Float32(), - StringField: uuid.NewString(), - BoolField: true, - BytesField: []byte(uuid.NewString()), - NewField: uuid.NewString(), - } - _, err = writer2.Write(ctx, 
&evt2) - require.NoError(t, err) - - consumerTopicConfig := zkafka.ConsumerTopicConfig{ - ClientID: fmt.Sprintf("reader-%s-%s", t.Name(), uuid.NewString()), - Topic: topic, - Formatter: zkafka.JSONSchemaRegistry, - SchemaRegistry: zkafka.SchemaRegistryConfig{ - URL: "mock://", - }, - GroupID: groupID, - AdditionalProps: map[string]any{ - "auto.offset.reset": "earliest", - }, - } - reader, err := client.Reader(ctx, consumerTopicConfig) - require.NoError(t, err) - - t.Log("Begin reading messages") - results, err := readMessages(reader, 2) - require.NoError(t, err) - - msg1 := <-results - msg2 := <-results - t.Log("Close reader") - - require.NoError(t, reader.Close()) - - receivedEvt1 := json1.DummyEvent{} - require.NoError(t, msg1.Decode(&receivedEvt1)) - assertEqual(t, evt1, receivedEvt1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) - - receivedEvt2Schema1 := json1.DummyEvent{} - require.NoError(t, msg2.Decode(&receivedEvt2Schema1)) - expectedEvt2 := json1.DummyEvent{ - IntField: evt2.IntField, - DoubleField: evt2.DoubleField, - StringField: evt2.StringField, - BoolField: evt2.BoolField, - BytesField: evt2.BytesField, - } - assertEqual(t, expectedEvt2, receivedEvt2Schema1, cmpopts.IgnoreUnexported(json1.DummyEvent{})) - - receivedEvt2Schema2 := json2.DummyEvent{} - require.NoError(t, msg2.Decode(&receivedEvt2Schema2)) - assertEqual(t, evt2, receivedEvt2Schema2, cmpopts.IgnoreUnexported(json2.DummyEvent{})) -} - func Test_SchemaRegistry_AutoRegisterSchemasFalse_WillNotWriteMessage(t *testing.T) { checkShouldSkipTest(t, enableKafkaBrokerTest) diff --git a/test/worker_test.go b/test/worker_test.go index d1334c9..a16292c 100644 --- a/test/worker_test.go +++ b/test/worker_test.go @@ -488,8 +488,7 @@ func TestWork_Run_SpedUpIsFaster(t *testing.T) { }).AnyTimes() mockReader.EXPECT().Close().Return(nil).AnyTimes() - mockClientProvider := zkafka_mocks.NewMockClientProvider(ctrl) - mockClientProvider.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(2).Return(mockReader, nil) + mockClientProvider := zkafka.FakeClient{R: mockReader} kwf := zkafka.NewWorkFactory(mockClientProvider, zkafka.WithLogger(zkafka.NoopLogger{})) slow := fakeProcessor{ @@ -570,8 +569,7 @@ func TestKafkaWork_ProcessorReturnsErrorIsLoggedAsWarning(t *testing.T) { }) mockReader := zkafka_mocks.NewMockReader(ctrl) mockReader.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - mockClientProvider := zkafka_mocks.NewMockClientProvider(ctrl) - mockClientProvider.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(mockReader, nil) + mockClientProvider := zkafka.FakeClient{R: mockReader} processor := fakeProcessor{ process: func(ctx context.Context, message *zkafka.Message) error { @@ -622,8 +620,7 @@ func TestKafkaWork_ProcessorTimeoutCausesContextCancellation(t *testing.T) { mockReader := zkafka_mocks.NewMockReader(ctrl) mockReader.EXPECT().Read(gomock.Any()).AnyTimes().Return(msg, nil) - mockClientProvider := zkafka_mocks.NewMockClientProvider(ctrl) - mockClientProvider.EXPECT().Reader(gomock.Any(), gomock.Any()).Times(1).Return(mockReader, nil) + mockClientProvider := zkafka.FakeClient{R: mockReader} wf := zkafka.NewWorkFactory(mockClientProvider, zkafka.WithLogger(l)) @@ -682,9 +679,7 @@ func TestWork_WithDeadLetterTopic_NoMessagesWrittenToDLTSinceNoErrorsOccurred(t mockWriter.EXPECT().Write(gomock.Any(), gomock.Any()).Times(0) mockWriter.EXPECT().Close().AnyTimes() - mockClientProvider := zkafka_mocks.NewMockClientProvider(ctrl) - mockClientProvider.EXPECT().Reader(gomock.Any(), 
gomock.Any()).Times(1).Return(mockReader, nil) - mockClientProvider.EXPECT().Writer(gomock.Any(), gomock.Any()).Times(2).Return(mockWriter, nil) + mockClientProvider := zkafka.FakeClient{R: mockReader, W: mockWriter} kwf := zkafka.NewWorkFactory(mockClientProvider, zkafka.WithLogger(l)) diff --git a/testhelper.go b/testhelper.go index 1ce4b3e..c2a2040 100644 --- a/testhelper.go +++ b/testhelper.go @@ -92,6 +92,9 @@ func GetMsgFromFake(input *FakeMessage) *Message { var _ ClientProvider = (*FakeClient)(nil) +// FakeClient is a convenience struct for testing purposes. +// It allows the specification of your own Reader/Writer while implementing the `ClientProvider` interface, +// which makes it compatible with a work factory. type FakeClient struct { R Reader W Writer diff --git a/work.go b/work.go index b676bc9..e20c067 100644 --- a/work.go +++ b/work.go @@ -841,7 +841,7 @@ func (c *delayCalculator) remaining(targetDelay time.Duration, msgTimeStamp time var _ processor = (*processorAdapter)(nil) type processorAdapter struct { - p func(_ context.Context, msg *Message) error + p func(_ context.Context, _ *Message) error } func (a processorAdapter) Process(ctx context.Context, message *Message) error { diff --git a/work_test.go b/work_test.go index f6ac8cd..1dfc6b5 100644 --- a/work_test.go +++ b/work_test.go @@ -71,7 +71,7 @@ func TestWork_WithOptions(t *testing.T) { tp := noop.TracerProvider{} propagator := propagation.TraceContext{} - wf := NewWorkFactory(mockClientProvider{}, WithTracerProvider(tp), WithTextMapPropagator(propagator)) + wf := NewWorkFactory(FakeClient{}, WithTracerProvider(tp), WithTextMapPropagator(propagator)) work := wf.Create(ConsumerTopicConfig{}, &timeDelayProcessor{}) @@ -741,7 +741,7 @@ func Fuzz_AnySpeedupInputAlwaysCreatesABufferedChannel(f *testing.F) { f.Add(uint16(9)) f.Fuzz(func(t *testing.T, speedup uint16) { - wf := NewWorkFactory(mockClientProvider{}) + wf := NewWorkFactory(FakeClient{}) p := timeDelayProcessor{} w := wf.Create(ConsumerTopicConfig{}, &p, Speedup(speedup)) require.Greater(t, cap(w.messageBuffer), 0) @@ -783,23 +783,6 @@ func (m *timeDelayProcessor) Process(_ context.Context, message *Message) error return nil } -type mockClientProvider struct { - r Reader - w Writer -} - -func (m mockClientProvider) Reader(ctx context.Context, topicConfig ConsumerTopicConfig, opts ...ReaderOption) (Reader, error) { - return m.r, nil -} - -func (m mockClientProvider) Writer(ctx context.Context, topicConfig ProducerTopicConfig, opts ...WriterOption) (Writer, error) { - return m.w, nil -} - -func (mockClientProvider) Close() error { - return nil -} - func assertContains(t *testing.T, wantIn kafka.TopicPartition, options []kafka.TopicPartition) { t.Helper() for _, want := range options { From 4bc4a7929cecd8e7f071ee40ec43811324b6f123 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 20:51:41 -0700 Subject: [PATCH 27/34] Updated --- writer.go | 21 +++++++-------------- writer_test.go | 2 +- 2 files changed, 8 insertions(+), 15 deletions(-) diff --git a/writer.go b/writer.go index 8569edc..7c70eab 100644 --- a/writer.go +++ b/writer.go @@ -96,8 +96,8 @@ func newWriter(args writerArgs) (*KWriter, error) { for _, opt := range args.opts { opt(&s) } - if s.fmtter != nil { - w.fmtter = s.fmtter + if s.f != nil { + w.fmtter = s.f } return w, nil } @@ -216,10 +216,7 @@ func (w *KWriter) startSpan(ctx context.Context, msg *kafka.Message) spanWrapper } func (w *KWriter) write(ctx context.Context, msg keyValuePair, opts ...WriteOption) (Response, error) { - 
s := writeSettings{ - avroSchema: w.topicConfig.SchemaRegistry.Serialization.Schema, - } - value, err := w.marshall(ctx, msg.value, s.avroSchema) + value, err := w.marshall(ctx, msg.value, w.topicConfig.SchemaRegistry.Serialization.Schema) if err != nil { return Response{}, err } @@ -247,17 +244,17 @@ func (w *KWriter) Close() { } type WriterSettings struct { - fmtter kFormatter + f kFormatter } // WriterOption is a function that modify the writer configurations type WriterOption func(*WriterSettings) // WFormatterOption sets the formatter for this writer -func WFormatterOption(fmtter Formatter) WriterOption { +func WFormatterOption(f Formatter) WriterOption { return func(s *WriterSettings) { - if fmtter != nil { - s.fmtter = zfmtShim{F: fmtter} + if f != nil { + s.f = zfmtShim{F: f} } } } @@ -267,10 +264,6 @@ type WriteOption interface { apply(s *kafka.Message) } -type writeSettings struct { - avroSchema string -} - // WithHeaders allows for the specification of headers. Specified headers will override collisions. func WithHeaders(headers map[string]string) WriteOption { return withHeaderOption{headers: headers} } diff --git a/writer_test.go b/writer_test.go index 798d949..05a0ce9 100644 --- a/writer_test.go +++ b/writer_test.go @@ -567,7 +567,7 @@ func TestWriter_WithOptions(t *testing.T) { settings := WriterSettings{} WFormatterOption(&zfmt.StringFormatter{})(&settings) - require.NotNil(t, settings.fmtter, "expected non-nil formatter") + require.NotNil(t, settings.f, "expected non-nil formatter") } func Test_writeAttributeCarrier_Set(t *testing.T) { From f25b0ca6b79fa003b552a46b2ba502f054b3beff Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Sun, 22 Sep 2024 21:02:07 -0700 Subject: [PATCH 28/34] Updated --- example/producer_avro/main.go | 12 +++- example/worker_avro/go.mod | 18 +++++- example/worker_avro/go.sum | 100 +++++++++++++++++++++++++++++++++- example/worker_avro/main.go | 17 ++---- work.go | 2 + 5 files changed, 130 insertions(+), 19 deletions(-) diff --git a/example/producer_avro/main.go b/example/producer_avro/main.go index b804160..f9472a5 100644 --- a/example/producer_avro/main.go +++ b/example/producer_avro/main.go @@ -23,8 +23,15 @@ func main() { Formatter: zkafka.AvroSchemaRegistry, SchemaRegistry: zkafka.SchemaRegistryConfig{ URL: "http://localhost:8081", - Serialization: struct{ AutoRegisterSchemas bool }{ + Serialization: zkafka.SerializationConfig{ + // This likely isn't needed in production. A typical workflow involves registering + // a schema a priori. But for the local example, to skip that setup, the flag is set to true AutoRegisterSchemas: true, + // When using avro schema registry, you must specify the schema. In this case, + // the schema used to generate the golang type is used. + // The heetch-generated struct also embeds the schema (and isn't lossy like some of the + // other generative solutions; one lib, for example, didn't include default values), so that could be used as well. + Schema: dummyEventSchema, }, }, }) @@ -38,8 +45,7 @@ func main() { StringField: randomNames[rand.Intn(len(randomNames))], } - resp, err := writer.Write(ctx, event, zkafka.WithAvroSchema(dummyEventSchema)) - //resp, err := writer.Write(ctx, &event) + resp, err := writer.Write(ctx, event) if err != nil { log.Panic(err) } diff --git a/example/worker_avro/go.mod b/example/worker_avro/go.mod index 2bb9cf1..a5646ae 100644 --- a/example/worker_avro/go.mod +++ b/example/worker_avro/go.mod @@ -6,17 +6,33 @@ replace github.com/zillow/zkafka v1.0.0 => ../..
require ( github.com/google/uuid v1.6.0 + github.com/heetch/avro v0.4.5 + github.com/zillow/zkafka v1.0.0 ) require ( github.com/actgardner/gogen-avro/v10 v10.2.1 // indirect + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/bufbuild/protocompile v0.8.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/confluentinc/confluent-kafka-go/v2 v2.5.0 // indirect github.com/golang/protobuf v1.5.4 // indirect - github.com/heetch/avro v0.4.5 // indirect + github.com/hamba/avro/v2 v2.20.1 // indirect + github.com/invopop/jsonschema v0.12.0 // indirect + github.com/jhump/protoreflect v1.15.6 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/santhosh-tekuri/jsonschema/v5 v5.3.0 // indirect github.com/sony/gobreaker v1.0.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect github.com/zillow/zfmt v1.0.1 // indirect go.opentelemetry.io/otel v1.28.0 // indirect go.opentelemetry.io/otel/trace v1.28.0 // indirect golang.org/x/sync v0.7.0 // indirect + google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa // indirect google.golang.org/protobuf v1.34.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/example/worker_avro/go.sum b/example/worker_avro/go.sum index f1a03d8..ef03f36 100644 --- a/example/worker_avro/go.sum +++ b/example/worker_avro/go.sum @@ -1,11 +1,28 @@ +cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM= +cloud.google.com/go/compute v1.25.1 h1:ZRpHJedLtTpKgr3RV1Fx23NuaAEN1Zfx9hw1u4aJdjU= +cloud.google.com/go/compute v1.25.1/go.mod h1:oopOIR53ly6viBYxaDhBfJwzUAxf1zE//uf3IB011ls= +cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= +cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ= github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0 h1:n1DH8TPV4qqPTje2RcUBYwtrTWlabVp4n46+74X2pn4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.10.0/go.mod h1:HDcZnuGbiyppErN6lB+idp4CKhjbc8gwjto6OPpyggM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2 h1:LqbJ/WzJUwBf8UiaSzgX7aMclParm9/5Vgp+TY51uBQ= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0 h1:DRiANoJTiW6obBQe3SqZizkuV1PEgfiiGivmVocDy64= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0/go.mod h1:qLIye2hwb/ZouqhpSD9Zn3SJipvpEnz1Ywl3VUk9Y0s= 
+github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA= +github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= @@ -16,6 +33,8 @@ github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpH github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= github.com/actgardner/gogen-avro/v10 v10.2.1 h1:z3pOGblRjAJCYpkIJ8CmbMJdksi4rAhaygw0dyXZ930= github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ= +github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= +github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA= github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= github.com/aws/aws-sdk-go-v2/config v1.27.10 h1:PS+65jThT0T/snC5WjyfHHyUgG+eBoupSDV+f838cro= @@ -34,6 +53,8 @@ github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2 h1:Ji0DY1x github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.2/go.mod h1:5CsjAbs3NlGQyZNFACh+zztPDI7fU6eW9QsxjfnuBKg= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7 h1:ogRAwT1/gxJBcSWDMZlgyFUM962F51A5CRhDLbxLdmo= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.7/go.mod h1:YCsIZhXfRPLFFCl5xxY+1T9RKzOKjCut+28JSX2DnAk= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1 h1:SBn4I0fJXF9FYOVRSVMWuhvEKoAHDikjGpS3wlmw5DE= +github.com/aws/aws-sdk-go-v2/service/kms v1.30.1/go.mod h1:2snWQJQUKsbN66vAawJuOGX7dr37pfOq9hb0tZDGIqQ= github.com/aws/aws-sdk-go-v2/service/sso v1.20.4 h1:WzFol5Cd+yDxPAdnzTA5LmpHYSWinhmSj4rQChV0ee8= github.com/aws/aws-sdk-go-v2/service/sso v1.20.4/go.mod h1:qGzynb/msuZIE8I75DVRCUXw3o3ZyBmUvMwQ2t/BrGM= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.4 h1:Jux+gDDyi1Lruk+KHF91tK2KCuY61kzoCpvtvJJBtOE= @@ -42,10 +63,18 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.28.6 h1:cwIxeBttqPN3qkaAjcEcsh8NYr8n github.com/aws/aws-sdk-go-v2/service/sts v1.28.6/go.mod h1:FZf1/nKNEkHdGGJP/cI2MoIMquumuRK6ol3QQJNDxmw= github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q= github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= 
+github.com/bufbuild/protocompile v0.8.0 h1:9Kp1q6OkS9L4nM3FYbr8vlJnEwtbpDPQlQOVXfR+78s= +github.com/bufbuild/protocompile v0.8.0/go.mod h1:+Etjg4guZoAqzVk2czwEQP12yaxLJ8DxuqCJ9qHdH94= github.com/buger/goterm v1.0.4 h1:Z9YvGmOih81P0FbVtEYTFF6YsSgxSUKEhf/f9bTMXbY= github.com/buger/goterm v1.0.4/go.mod h1:HiFWV3xnkolgrBV3mY8m0X0Pumt4zg4QhbdOzQtB8tE= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/cenkalti/backoff/v3 v3.0.0 h1:ske+9nBpD9qZsTBoF41nW5L+AIuFBKMeze18XQ3eG1c= +github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs= github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= @@ -105,6 +134,8 @@ github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvn github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc= github.com/fvbommel/sortorder v1.0.2 h1:mV4o8B2hKboCdkJm+a7uX/SIpZob4JzUpc5GGnM45eo= github.com/fvbommel/sortorder v1.0.2/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= +github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k= +github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= @@ -117,28 +148,43 @@ github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2Kv github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= github.com/gogo/googleapis v1.4.1 h1:1Yx4Myt7BxzvUr5ldGSbwYiZG6t9wGBZ+8/fX3Wvtq0= github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw= +github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= 
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/cel-go v0.20.1 h1:nDx9r8S3L4pE61eDdt8igGj8rf5kjYR3ILxWIpWNi84= +github.com/google/cel-go v0.20.1/go.mod h1:kWcIzTsPX0zmQ+H3TirHstLLf9ep5QTsZBN9u4dOYLg= github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= +github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= +github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/gax-go/v2 v2.12.2 h1:mhN09QQW1jEWeMF74zGR81R30z4VJzjZsfkUhuHF+DA= +github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= @@ -147,14 +193,30 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaW github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= +github.com/hamba/avro/v2 v2.20.1 h1:3WByQiVn7wT7d27WQq6pvBRC00FVOrniP6u67FLA/2E= +github.com/hamba/avro/v2 v2.20.1/go.mod h1:xHiKXbISpb3Ovc809XdzWow+XGTn+Oyf/F9aZbTLAig= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.7.5 h1:bJj+Pj19UZMIweq/iie+1u5YCdGrnxCT9yvm0e+Nd5M= +github.com/hashicorp/go-retryablehttp v0.7.5/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8= +github.com/hashicorp/go-rootcerts v1.0.2 
h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6 h1:om4Al8Oy7kCm/B86rLCLah4Dt5Aa0Fr5rYBG60OzwHQ= +github.com/hashicorp/go-secure-stdlib/parseutil v0.1.6/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9CdjCtrXrXGuOpxEA7Ts= +github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4= +github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc= +github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A= github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/vault/api v1.12.1 h1:WzGN4X5jrJdNO39g6Sa55djNio3I9DxEBOTmCZE7tm0= +github.com/hashicorp/vault/api v1.12.1/go.mod h1:1pqP/sErScodde+ybJCyP+ONC4jzEg7Dmawg/QLWo1k= github.com/heetch/avro v0.4.5 h1:BSnj4wEeUG1IjMTm9/tBwQnV3euuIVa1mRWHnm1t8VU= github.com/heetch/avro v0.4.5/go.mod h1:gxf9GnbjTXmWmqxhdNbAMcZCjpye7RV5r9t3Q0dL6ws= github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= @@ -163,6 +225,10 @@ github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/invopop/jsonschema v0.12.0 h1:6ovsNSuvn9wEQVOyc72aycBMVQFKz7cPdMJn10CvzRI= +github.com/invopop/jsonschema v0.12.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= +github.com/jhump/protoreflect v1.15.6 h1:WMYJbw2Wo+KOWwZFvgY0jMoVHM6i4XIvRs2RcBj5VmI= +github.com/jhump/protoreflect v1.15.6/go.mod h1:jCHoyYQIJnaabEYnbGwyo9hUqfyUMTbJw/tAut5t97E= github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4= github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= @@ -177,6 +243,8 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= @@ -199,6 +267,8 @@ github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= github.com/miekg/pkcs11 v1.1.1/go.mod 
h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= @@ -225,6 +295,7 @@ github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg= github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= @@ -241,6 +312,8 @@ github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQ github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -261,6 +334,10 @@ github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= +github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.0 h1:uIkTLo0AGRc8l7h5l9r+GcYi9qfVPt6lD4/bhmzfiKo= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.0/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0= github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE= github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs= github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU= @@ -271,6 +348,8 @@ github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11 github.com/shirou/gopsutil/v3 v3.23.12/go.mod 
h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM= github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= +github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= @@ -281,6 +360,8 @@ github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= @@ -293,6 +374,12 @@ github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4D github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA= github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0 h1:A/2tIdYXqUuVZeWy0Yq/PWKsXgebzMyh5mLbpNEMVUo= +github.com/tink-crypto/tink-go-gcpkms/v2 v2.1.0/go.mod h1:QXPc/i5yUEWWZ4lbe2WOam1kDdrXjGHRjl0Lzo7IQDU= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0 h1:REG5YX2omhgPmiIT7GLqmzWFnIksZsog1FHJ+Pi1xJE= +github.com/tink-crypto/tink-go-hcvault/v2 v2.1.0/go.mod h1:OJLS+EYJo/BTViJj7EBG5deKLeQfYwVNW8HMS1qHAAo= +github.com/tink-crypto/tink-go/v2 v2.1.0 h1:QXFBguwMwTIaU17EgZpEJWsUSc60b1BAGTzBIoMdmok= +github.com/tink-crypto/tink-go/v2 v2.1.0/go.mod h1:y1TnYFt1i2eZVfx4OGc+C+EMp4CoKWAw2VSEuoicHHI= github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= @@ -303,18 +390,22 @@ github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea h1:SXhTLE6pb6eld/ github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea/go.mod h1:WPnis/6cRcDZSUvVmezrxJPkiO87ThFYsoUiMwWNDJk= github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531 h1:Y/M5lygoNPKwVNLMPXgVfsRT40CSFKXCxuU8LoHySjs= github.com/tonistiigi/vt100 v0.0.0-20230623042737-f9a4f7ef6531/go.mod h1:ulncasL3N9uLrVann0m+CDlJKWsIAP34MPcOJF6VRvc= +github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc= +github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer 
v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xiatechs/jsonata-go v1.8.5 h1:m1NaokPKD6LPaTPRl674EQz5mpkJvM3ymjdReDEP6/A= +github.com/xiatechs/jsonata-go v1.8.5/go.mod h1:yGEvviiftcdVfhSRhRSpgyTel89T58f+690iB0fp2Vk= github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/zillow/zfmt v1.0.1 h1:JLN5WaxoqqoEPUpVWer83uhXhDPAA2nZkfQqgKnWp+w= github.com/zillow/zfmt v1.0.1/go.mod h1:0PpKh4rWh+5Ghr2bbuN5UvEcqEz6PkHfE0Idgjyxy7Y= -github.com/zillow/zkafka v1.0.2 h1:xAcKfILqz5qcqcfcPYw3nXJXr9nOeXbgMfVhQNilINg= -github.com/zillow/zkafka v1.0.2/go.mod h1:cA7XPO8b91ByfgDW6iBujB5vB98U1GP73Vraq0z6WRw= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.46.1 h1:gbhw/u49SS3gkPWiYweQNJGm/uJN5GkI/FrosxSHT7A= @@ -367,6 +458,8 @@ golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +google.golang.org/api v0.169.0 h1:QwWPy71FgMWqJN/l6jVlFHUa29a7dcUy02I8o799nPY= +google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20240325203815-454cdb8f5daa h1:ePqxpG3LVx+feAUOx8YmR5T7rc0rdzK8DyxM8cQ9zq0= @@ -381,6 +474,9 @@ google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6h google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y= gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= diff --git a/example/worker_avro/main.go b/example/worker_avro/main.go index 0264cd6..8547625 100644 --- 
a/example/worker_avro/main.go +++ b/example/worker_avro/main.go @@ -22,8 +22,6 @@ func main() { client := zkafka.NewClient(zkafka.Config{ BootstrapServers: []string{"localhost:29092"}, }, - // optionally add a logger, which implements zkafka.Logger, to see detailed information about message processsing - //zkafka.LoggerOption(), ) // It's important to close the client after consumption to gracefully leave the consumer group // (this commits completed work, and informs the broker that this consumer is leaving the group which yields a faster rebalance) @@ -43,10 +41,10 @@ func main() { // The formatter is registered internally to the `zkafka.Message` and used when calling `msg.Decode()` // string fmt can be used for both binary and pure strings encoded in the value field of the kafka message. Other options include // json, proto, avro, etc. - Formatter: zkafka.AvroConfluentFmt, + Formatter: zkafka.AvroSchemaRegistry, SchemaRegistry: zkafka.SchemaRegistryConfig{ URL: "http://localhost:8081", - Deserialization: struct{}{}, + Deserialization: zkafka.DeserializationConfig{}, }, AdditionalProps: map[string]any{ // only important the first time a consumer group connects. Subsequent connections will start @@ -69,15 +67,13 @@ func main() { wf := zkafka.NewWorkFactory(client) // Register a processor which is executed per message. // Speedup is used to create multiple processor goroutines. Order is still maintained with this setup by way of `virtual partitions` - work := wf.Create(topicConfig, &Processor{}, zkafka.Speedup(1)) + work := wf.CreateWithFunc(topicConfig, Process, zkafka.Speedup(1)) if err := work.Run(ctx, shutdown); err != nil { log.Panic(err) } } -type Processor struct{} - -func (p Processor) Process(_ context.Context, msg *zkafka.Message) error { +func Process(_ context.Context, msg *zkafka.Message) error { // sleep to simulate random amount of work time.Sleep(100 * time.Millisecond) event := DummyEvent{} @@ -86,11 +82,6 @@ func (p Processor) Process(_ context.Context, msg *zkafka.Message) error { log.Printf("error occurred during processing: %s", err) return err } - // optionally, if you don't want to use the configured formatter at all, access the kafka message payload bytes directly. - // The commented out block shows accessing the byte array. In this case we're stringifying the bytes, but this could be json unmarshalled, - // proto unmarshalled etc., depending on the expected payload - //data := msg.Value() - //str := string(data) log.Printf(" offset: %d, partition: %d. event.Age: %d, event.Name %s\n", msg.Offset, msg.Partition, event.IntField, event.StringField) return nil diff --git a/work.go b/work.go index e20c067..ded5107 100644 --- a/work.go +++ b/work.go @@ -653,6 +653,8 @@ func NewWorkFactory( return factory } +// CreateWithFunc creates a new Work instance, but allows for the processor to be specified as a callback function +// instead of an interface func (f WorkFactory) CreateWithFunc(topicConfig ConsumerTopicConfig, p func(_ context.Context, msg *Message) error, options ...WorkOption) *Work { return f.Create(topicConfig, processorAdapter{p: p}, options...) 
}

From 554c0f945d8df6317687a8ea39eb05e48ebb8230 Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Sun, 22 Sep 2024 21:15:36 -0700
Subject: [PATCH 29/34] Updated changelog and README.md

---
 README.md    | 16 ++++++++++------
 changelog.md |  7 +++++++
 2 files changed, 17 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index 3657f6b..6c9b9f4 100644
--- a/README.md
+++ b/README.md
@@ -270,13 +270,17 @@ special processing of these messages.
 
 ### SchemaRegistry Support:
 
-There is limited support for schema registry in zkafka. A schemaID can be hardcoded via configuration. No
-communication is done with schema registry, but some primitive checks can be conducted if a schemaID is specified via
-configuration.
+zkafka supports schema registry. It extends `zfmt` to enable this, adding three `zfmt.FormatterType` values:
+```
+	AvroSchemaRegistry  zfmt.FormatterType = "avro_schema_registry"
+	ProtoSchemaRegistry zfmt.FormatterType = "proto_schema_registry"
+	JSONSchemaRegistry  zfmt.FormatterType = "json_schema_registry"
+```
+
+These can be used in `ProducerTopicConfig`/`ConsumerTopicConfig` just like the other formatter types. Examples have been added under
+`example/producer_avro` and `example/worker_avro`, which demonstrate the additional configuration (mostly there to enable the
+required schema registry communication).
 
-Below is a breakdown of schema registry interactions into two subcategories. One is `Raw Handling` where the configurable
-foramtter is bypassed entirely in favor of operating with the value byte arrays directly. The other is `Native Support` which
-attempts to create confluent compatible serializations, without communicating with schema registry directly.
 
 #### Producers
 
diff --git a/changelog.md b/changelog.md
index ca74bb1..56f6277 100644
--- a/changelog.md
+++ b/changelog.md
@@ -4,6 +4,13 @@ All notable changes to this project will be documented in this file.
 
 This project adheres to Semantic Versioning.
 
+## 1.1.0 (Sep 22, 2024)
+
+1. Added support for schemaregistry (avro, proto, json). Extended `zfmt.FormatterType` types to include `avro_schema_registry`, `proto_schema_registry` and `json_schema_registry`
+2. Added lifecycle function `LifecyclePostReadImmediate`
+3. Added `workFactory.CreateWithFunc` which is a convenience work factory method for creating work using a callback instead of an interface (can reduce boilerplate) in some scenarios.
+
+
 ## 1.0.2 (Sep 6, 2024)
 
 1. Updated `WithDeadLetterTopic` option to borrow username and password from ConsumerTopicConfig when those issues aren't specified on DeadLetterTopicConfig
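To make the README section added above concrete, here is a minimal producer sketch using the new `avro_schema_registry` formatter. It is an illustration, not a file from this series: the broker and schema registry addresses mirror the example compose setup, while the topic name, client ID, event struct, struct tags, and inline schema are assumptions made for this sketch.

```go
package main

import (
	"context"
	"log"

	"github.com/zillow/zkafka"
)

// DummyEvent is an illustrative hand-written type; the checked-in examples
// instead generate their event types from dummy_event.avsc (see the
// Makefile's gen target).
type DummyEvent struct {
	IntField    int    `avro:"IntField"`
	StringField string `avro:"StringField"`
}

// dummyEventSchema is an assumed writer schema matching DummyEvent.
const dummyEventSchema = `{
	"type": "record",
	"name": "DummyEvent",
	"fields": [
		{"name": "IntField", "type": "int"},
		{"name": "StringField", "type": "string"}
	]
}`

func main() {
	ctx := context.Background()
	client := zkafka.NewClient(zkafka.Config{
		BootstrapServers: []string{"localhost:29092"},
	})
	defer func() { _ = client.Close() }()

	writer, err := client.Writer(ctx, zkafka.ProducerTopicConfig{
		ClientID:  "example-producer",
		Topic:     "zkafka-example-topic",
		Formatter: zkafka.AvroSchemaRegistry,
		SchemaRegistry: zkafka.SchemaRegistryConfig{
			URL: "http://localhost:8081",
			Serialization: zkafka.SerializationConfig{
				// Register new/evolved schemas dynamically; typically not
				// used for production workloads.
				AutoRegisterSchemas: true,
				// Writer schema the avro formatter uses for schema evolution.
				Schema: dummyEventSchema,
			},
		},
	})
	if err != nil {
		log.Panic(err)
	}
	// The formatter serializes the event and handles the schema registry
	// interaction on write.
	if _, err := writer.Write(ctx, DummyEvent{IntField: 42, StringField: "hello"}); err != nil {
		log.Panic(err)
	}
}
```

The consumer side of the same flow appears in the `example/worker_avro` diff earlier in the series, where `DeserializationConfig` plays the analogous role on reads.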
From 1fdba5b36bad390826ca82f7f672967aa0e2761e Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Mon, 23 Sep 2024 09:08:15 -0700
Subject: [PATCH 30/34] Updated some tests

---
 changelog.md   |  1 +
 client_test.go | 62 ++++++++++++++++++++++++++++++++++++++++++--------
 config.go      | 26 ++++++++++++++++-----
 writer.go      |  6 ++++-
 writer_test.go |  3 +++
 5 files changed, 81 insertions(+), 17 deletions(-)

diff --git a/changelog.md b/changelog.md
index 56f6277..e1d69eb 100644
--- a/changelog.md
+++ b/changelog.md
@@ -9,6 +9,7 @@ This project adheres to Semantic Versioning.
 1. Added support for schemaregistry (avro, proto, json). Extended `zfmt.FormatterType` types to include `avro_schema_registry`, `proto_schema_registry` and `json_schema_registry`
 2. Added lifecycle function `LifecyclePostReadImmediate`
 3. Added `workFactory.CreateWithFunc` which is a convenience work factory method for creating work using a callback instead of an interface (can reduce boilerplate) in some scenarios.
+4. During the creation of readers/writers an error is now returned if bootstrap servers is empty
 
 ## 1.0.2 (Sep 6, 2024)
 
diff --git a/client_test.go b/client_test.go
index 144e083..b7b5c18 100644
--- a/client_test.go
+++ b/client_test.go
@@ -344,6 +344,7 @@ func TestClient_Writer(t *testing.T) {
 		{
 			name: "create new KWriter for closed writer",
 			fields: fields{
+				conf: Config{BootstrapServers: []string{"localhost:9092"}},
 				writers: map[string]*KWriter{
 					"test-id": &KWriter{isClosed: true},
 				},
@@ -373,6 +374,7 @@ func TestClient_Writer(t *testing.T) {
 		{
 			name: "create new KWriter for closed writer with default overrides",
 			fields: fields{
+				conf: Config{BootstrapServers: []string{"localhost:9092"}},
 				writers: map[string]*KWriter{
 					"test-id": &KWriter{isClosed: true},
 				},
@@ -780,10 +782,27 @@ func Test_makeConfig_Consumer(t *testing.T) {
 		prefix      string
 	}
 	tests := []struct {
-		name string
-		args args
-		want kafka.ConfigMap
+		name    string
+		args    args
+		want    kafka.ConfigMap
+		wantErr string
 	}{
+		{
+			name: "missing bootstrap",
+			args: args{
+				conf: Config{},
+				topicConfig: ConsumerTopicConfig{
+					ClientID:    "clientid",
+					GroupID:     "group",
+					Topic:       "",
+					Formatter:   "",
+					SchemaID:    0,
+					Transaction: true,
+				},
+			},
+			wantErr: "invalid consumer config, missing bootstrap server addresses",
+		},
+
 		{
 			name: "with transaction",
 			args: args{
@@ -996,8 +1015,12 @@ func Test_makeConfig_Consumer(t *testing.T) {
 		t.Run(tt.name, func(t *testing.T) {
 			defer recoverThenFail(t)
 			got, err := makeConsumerConfig(tt.args.conf, tt.args.topicConfig, tt.args.prefix)
-			require.NoError(t, err)
-			assertEqual(t, got, tt.want)
+			if tt.wantErr == "" {
+				require.NoError(t, err)
+				assertEqual(t, got, tt.want)
+			} else {
+				require.ErrorContains(t, err, tt.wantErr)
+			}
 		})
 	}
 }
@@ -1008,10 +1031,24 @@ func Test_makeConfig_Producer(t *testing.T) {
 		topicConfig ProducerTopicConfig
 	}
 	tests := []struct {
-		name string
-		args args
-		want kafka.ConfigMap
+		name    string
+		args    args
+		want    kafka.ConfigMap
+		wantErr string
 	}{
+		{
+			name: "with missing bootstrap config",
+			args: args{
+				conf: Config{},
+				topicConfig: ProducerTopicConfig{
+					ClientID:    "clientid",
+					Topic:       "yyy",
+					Transaction: true,
+				},
+			},
+			wantErr: "invalid producer config, missing bootstrap server addresses",
+		},
+
 		{
 			name: "with transaction",
 			args: args{
@@ -1212,8 +1249,13 @@ func Test_makeConfig_Producer(t *testing.T) {
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			defer recoverThenFail(t)
-			got := makeProducerConfig(tt.args.conf, tt.args.topicConfig)
-			assertEqual(t, got, tt.want)
+			got, err := makeProducerConfig(tt.args.conf, tt.args.topicConfig)
+			if tt.wantErr == "" {
+				require.NoError(t, err)
+				assertEqual(t, got, tt.want)
+			} else {
+				require.ErrorContains(t, err, tt.wantErr)
+			}
 		})
 	}
 }
diff --git a/config.go b/config.go
index dace81a..6d12b5d 100644
--- a/config.go
+++ b/config.go
@@ -174,6 +174,8 @@ type ProducerTopicConfig struct {
 	// Formatter is json if not defined
 	Formatter zfmt.FormatterType
 
+	// SchemaRegistry provides details about connecting to a schema registry, including its URL
+	// and serialization/deserialization settings.
 	SchemaRegistry SchemaRegistryConfig
 
 	// SchemaID defines the schema registered with Confluent schema Registry
@@ -212,14 +214,22 @@ func (p ProducerTopicConfig) GetSchemaID() int {
 }
 
 type SchemaRegistryConfig struct {
-	URL           string
-	Serialization SerializationConfig
+	// URL is the schema registry URL. During serialization and deserialization,
+	// the schema registry is checked to confirm schema compatibility.
+	URL string
+	// Serialization provides additional information used by schema registry formatters during serialization (data write)
+	Serialization SerializationConfig
+	// Deserialization provides additional information used by schema registry formatters during deserialization (data read)
 	Deserialization DeserializationConfig
 }
 
 type SerializationConfig struct {
+	// AutoRegisterSchemas indicates whether new schemas (those that evolve existing schemas or are brand new) should be registered
+	// with schema registry dynamically. This feature is typically not used for production workloads.
 	AutoRegisterSchemas bool
-	Schema              string
+	// Schema is used exclusively by the avro schema registry formatter today. It's necessary for providing the proper
+	// schema evolution properties expected by typical use cases.
+	Schema string
 }
 
 type DeserializationConfig struct {
@@ -323,7 +333,7 @@ func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix str
 		addresses = topicConfig.BootstrapServers
 	}
 	if len(addresses) == 0 {
-		return nil, errors.New("invalid config, missing bootstrap server addresses")
+		return nil, errors.New("invalid consumer config, missing bootstrap server addresses")
 	}
 	configMap[bootstrapServers] = strings.Join(addresses, ",")
 
@@ -362,7 +372,7 @@ func makeConsumerConfig(conf Config, topicConfig ConsumerTopicConfig, prefix str
 // makeProducerConfig creates a kafka configMap from the specified strongly typed Config and TopicConfig.
 // TopicConfig specifies a way to specify config values that aren't strongly typed via AdditionalProps field.
 // Those values are overwritten if specified in strongly typed TopicConfig fields.
-func makeProducerConfig(conf Config, topicConfig ProducerTopicConfig) kafka.ConfigMap {
+func makeProducerConfig(conf Config, topicConfig ProducerTopicConfig) (kafka.ConfigMap, error) {
 	configMap := kafka.ConfigMap{}
 
 	configMap[clientID] = getWriterKey(topicConfig)
@@ -402,6 +412,10 @@ func makeProducerConfig(conf Config, topicConfig ProducerTopicConfig) kafka.Conf
 	if len(topicConfig.BootstrapServers) != 0 {
 		addresses = topicConfig.BootstrapServers
 	}
+	if len(addresses) == 0 {
+		return nil, errors.New("invalid producer config, missing bootstrap server addresses")
+	}
+
 	configMap[bootstrapServers] = strings.Join(addresses, ",")
 
 	saslUname := conf.SaslUsername
@@ -433,5 +447,5 @@ func makeProducerConfig(conf Config, topicConfig ProducerTopicConfig) kafka.Conf
 			configMap[key] = kafka.ConfigValue(v)
 		}
 	}
-	return configMap
+	return configMap, nil
 }
diff --git a/writer.go b/writer.go
index 7c70eab..0e525e2 100644
--- a/writer.go
+++ b/writer.go
@@ -77,7 +77,11 @@ func newWriter(args writerArgs) (*KWriter, error) {
 	producer := args.producerProvider
 	formatter := args.f
 
-	confluentConfig := makeProducerConfig(conf, topicConfig)
+	confluentConfig, err := makeProducerConfig(conf, topicConfig)
+	if err != nil {
+		return nil, err
+	}
+
 	p, err := producer(confluentConfig)
 	if err != nil {
 		return nil, err
diff --git a/writer_test.go b/writer_test.go
index 05a0ce9..1524ad6 100644
--- a/writer_test.go
+++ b/writer_test.go
@@ -506,6 +506,7 @@ func Test_newWriter(t *testing.T) {
 		{
 			name: "custom formatter, no error. It is implied that user will supply formatter later",
 			args: args{
+				conf: Config{BootstrapServers: []string{"localhost:9092"}},
 				topicConfig: ProducerTopicConfig{
 					Formatter: zfmt.FormatterType("custom"),
 				},
@@ -531,8 +532,10 @@
 			wantErr: true,
 		},
 		{
+			name: "minimum config with formatter",
 			args: args{
+				conf:      Config{BootstrapServers: []string{"localhost:9092"}},
 				producerP: defaultConfluentProducerProvider{}.NewProducer,
 				topicConfig: ProducerTopicConfig{
 					Formatter: zfmt.StringFmt,

From dba790b2b55a5c6a877235789843dcec64c82214 Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Mon, 23 Sep 2024 09:09:41 -0700
Subject: [PATCH 31/34] Corrected error message assertion

---
 test/integration_test.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/integration_test.go b/test/integration_test.go
index 2e8c73e..c591de9 100644
--- a/test/integration_test.go
+++ b/test/integration_test.go
@@ -1376,7 +1376,7 @@ func Test_MissingBootstrap_ShouldGiveClearError(t *testing.T) {
 	})
 	err := w.Run(context.Background(), ctx.Done())
 	require.NoError(t, err)
-	require.ErrorContains(t, readErr, "invalid config, missing bootstrap server addresses")
+	require.ErrorContains(t, readErr, "invalid consumer config, missing bootstrap server addresses")
 }
 
 func createTopic(t *testing.T, bootstrapServer, topic string, partitions int) {

From 18ef0e31bcc1e8d147b1c61a25634c8759862fdd Mon Sep 17 00:00:00 2001
From: stewartboyd119
Date: Mon, 23 Sep 2024 09:30:14 -0700
Subject: [PATCH 32/34] Updated spelling of fmtter -> formatter

Updated compose.yaml to pin kafka/zookeeper versions
---
 client_test.go       | 28 ++++++++++++++--------------
 example/compose.yaml |  4 ++--
 formatter.go         |  6 +++---
 formatter_test.go    | 10 +++++-----
 message_test.go      |  2 +-
 reader.go            | 18 +++++++++---------
 writer.go            | 10 +++++-----
 writer_test.go       | 34 +++++++++++++++++-----------------
 8 files changed, 56 insertions(+), 56 deletions(-)

diff --git a/client_test.go b/client_test.go
index b7b5c18..98a262f 100644
--- a/client_test.go
+++ b/client_test.go
@@ -203,8 +203,8 @@ func TestClient_Reader(t *testing.T) {
 					SessionTimeoutMillis:  ptr(61000),
 					MaxPollIntervalMillis: ptr(61000),
 				},
-				logger: NoopLogger{},
-				fmtter: zfmtShim{&zfmt.AvroFormatter{}},
+				logger:    NoopLogger{},
+				formatter: zfmtShim{&zfmt.AvroFormatter{}},
 			},
 			wantErr: false,
 		},
@@ -236,8 +236,8 @@
 					SessionTimeoutMillis:  ptr(20000),
 					MaxPollIntervalMillis: ptr(21000),
 				},
-				logger: NoopLogger{},
-				fmtter: zfmtShim{&zfmt.AvroFormatter{}},
+				logger:    NoopLogger{},
+				formatter: zfmtShim{&zfmt.AvroFormatter{}},
 			},
 			wantErr: false,
 		},
@@ -296,7 +296,7 @@
 					assertEqual(t, a, b, cmpopts.IgnoreUnexported(MockKafkaConsumer{}))
 				}
 				assertEqual(t, gotReader.logger, tt.want.logger)
-				assertEqual(t, gotReader.fmtter, tt.want.fmtter)
+				assertEqual(t, gotReader.formatter, tt.want.formatter)
 			}
 		})
 	}
@@ -364,10 +364,10 @@ func TestClient_Writer(t *testing.T) {
 					NagleDisable: ptr(true),
 					LingerMillis: 0,
 				},
-				logger: NoopLogger{},
-				tracer: noop.TracerProvider{}.Tracer(""),
-				p:      propagation.TraceContext{},
-				fmtter: zfmtShim{&zfmt.ProtobufRawFormatter{}},
+				logger:    NoopLogger{},
+				tracer:    noop.TracerProvider{}.Tracer(""),
+				p:         propagation.TraceContext{},
+				formatter: zfmtShim{&zfmt.ProtobufRawFormatter{}},
 			},
 			wantErr: false,
 		},
@@ -394,10 +394,10 @@
 					NagleDisable: ptr(false),
 					LingerMillis: 1,
 				},
-				logger: NoopLogger{},
-				tracer: noop.TracerProvider{}.Tracer(""),
-				p: 
propagation.TraceContext{}, - fmtter: zfmtShim{&zfmt.ProtobufRawFormatter{}}, + logger: NoopLogger{}, + tracer: noop.TracerProvider{}.Tracer(""), + p: propagation.TraceContext{}, + formatter: zfmtShim{&zfmt.ProtobufRawFormatter{}}, }, wantErr: false, }, @@ -446,7 +446,7 @@ func TestClient_Writer(t *testing.T) { assertEqual(t, gotKWriter.topicConfig, tt.want.topicConfig) assertEqual(t, gotKWriter.logger, tt.want.logger) - assertEqual(t, gotKWriter.fmtter, tt.want.fmtter) + assertEqual(t, gotKWriter.formatter, tt.want.formatter) }) } } diff --git a/example/compose.yaml b/example/compose.yaml index 25505ec..f197d20 100644 --- a/example/compose.yaml +++ b/example/compose.yaml @@ -1,6 +1,6 @@ services: zookeeper: - image: confluentinc/cp-zookeeper:latest + image: confluentinc/cp-zookeeper:7.7.1 container_name: zkafka-zookeeper environment: ZOOKEEPER_CLIENT_PORT: 2181 @@ -8,7 +8,7 @@ services: ports: - "22181:2181" kafka: - image: confluentinc/cp-kafka:latest + image: confluentinc/cp-kafka:7.7.1 container_name: zkafka-broker depends_on: - zookeeper diff --git a/formatter.go b/formatter.go index 5c95692..ce77959 100644 --- a/formatter.go +++ b/formatter.go @@ -24,7 +24,7 @@ const ( JSONSchemaRegistry zfmt.FormatterType = "json_schema_registry" ) -var errMissingFmtter = errors.New("custom formatter is missing, did you forget to call WithFormatter()") +var errMissingFormatter = errors.New("custom formatter is missing, did you forget to call WithFormatter()") // Formatter allows the user to extend formatting capability to unsupported data types type Formatter interface { @@ -83,12 +83,12 @@ type errFormatter struct{} // marshall returns error with reminder func (f errFormatter) marshall(req marshReq) ([]byte, error) { - return nil, errMissingFmtter + return nil, errMissingFormatter } // unmarshal returns error with reminder func (f errFormatter) unmarshal(req unmarshReq) error { - return errMissingFmtter + return errMissingFormatter } type avroSchemaRegistryFormatter struct { diff --git a/formatter_test.go b/formatter_test.go index d36bd1a..8c464b1 100644 --- a/formatter_test.go +++ b/formatter_test.go @@ -8,11 +8,11 @@ import ( func TestNoopFormatter_Marshall_Unmarshal(t *testing.T) { defer recoverThenFail(t) - fmtter := errFormatter{} - _, err := fmtter.marshall(marshReq{subject: "anything"}) - require.ErrorIs(t, err, errMissingFmtter) + formatter := errFormatter{} + _, err := formatter.marshall(marshReq{subject: "anything"}) + require.ErrorIs(t, err, errMissingFormatter) var someInt int32 - err = fmtter.unmarshal(unmarshReq{data: []byte("test"), target: &someInt}) - require.ErrorIs(t, err, errMissingFmtter) + err = formatter.unmarshal(unmarshReq{data: []byte("test"), target: &someInt}) + require.ErrorIs(t, err, errMissingFormatter) } diff --git a/message_test.go b/message_test.go index 660c818..bad5441 100644 --- a/message_test.go +++ b/message_test.go @@ -26,7 +26,7 @@ func Test_makeProducerMessageRaw(t *testing.T) { hasHeaders bool }{ { - name: "has fmtter with valid input, no key, no partition", + name: "has formatter with valid input, no key, no partition", args: args{ serviceName: "concierge/test/test_group", topic: "test_topic", diff --git a/reader.go b/reader.go index d203979..3ccd108 100644 --- a/reader.go +++ b/reader.go @@ -43,7 +43,7 @@ type KReader struct { topicConfig ConsumerTopicConfig isClosed bool - fmtter kFormatter + formatter kFormatter logger Logger lifecycle LifecycleHooks @@ -83,7 +83,7 @@ func newReader(args readerArgs) (*KReader, error) { r := &KReader{ consumer: 
consumer, topicConfig: topicConfig, - fmtter: formatter, + formatter: formatter, logger: logger, lifecycle: args.hooks, tCommitMgr: newTopicCommitMgr(), @@ -92,8 +92,8 @@ func newReader(args readerArgs) (*KReader, error) { for _, opt := range args.opts { opt(&s) } - if s.fmtter != nil { - r.fmtter = s.fmtter + if s.formatter != nil { + r.formatter = s.formatter } return r, nil } @@ -226,7 +226,7 @@ func (r *KReader) mapMessage(_ context.Context, msg kafka.Message) *Message { } }, value: msg.Value, - fmt: r.fmtter, + fmt: r.formatter, } } @@ -313,17 +313,17 @@ func getTopicName(topicName *string) string { } type ReaderSettings struct { - fmtter kFormatter + formatter kFormatter } // ReaderOption is a function that modify the KReader configurations type ReaderOption func(*ReaderSettings) // RFormatterOption sets the formatter for this reader -func RFormatterOption(fmtter Formatter) ReaderOption { +func RFormatterOption(formatter Formatter) ReaderOption { return func(s *ReaderSettings) { - if fmtter != nil { - s.fmtter = zfmtShim{F: fmtter} + if formatter != nil { + s.formatter = zfmtShim{F: formatter} } } } diff --git a/writer.go b/writer.go index 0e525e2..4b706a1 100644 --- a/writer.go +++ b/writer.go @@ -46,7 +46,7 @@ type KWriter struct { mu sync.Mutex producer KafkaProducer topicConfig ProducerTopicConfig - fmtter kFormatter + formatter kFormatter logger Logger tracer trace.Tracer p propagation.TextMapPropagator @@ -90,7 +90,7 @@ func newWriter(args writerArgs) (*KWriter, error) { w := &KWriter{ producer: p, topicConfig: topicConfig, - fmtter: formatter, + formatter: formatter, logger: args.l, tracer: args.t, p: args.p, @@ -101,7 +101,7 @@ func newWriter(args writerArgs) (*KWriter, error) { opt(&s) } if s.f != nil { - w.fmtter = s.f + w.formatter = s.f } return w, nil } @@ -229,10 +229,10 @@ func (w *KWriter) write(ctx context.Context, msg keyValuePair, opts ...WriteOpti } func (w *KWriter) marshall(_ context.Context, value any, schema string) ([]byte, error) { - if w.fmtter == nil { + if w.formatter == nil { return nil, errors.New("formatter or confluent formatter is not supplied to produce kafka message") } - return w.fmtter.marshall(marshReq{ + return w.formatter.marshall(marshReq{ topic: w.topicConfig.Topic, subject: value, schema: schema, diff --git a/writer_test.go b/writer_test.go index 1524ad6..dd56bb1 100644 --- a/writer_test.go +++ b/writer_test.go @@ -83,11 +83,11 @@ func TestWriter_Write(t *testing.T) { defer recoverThenFail(t) w := &KWriter{ - producer: tt.fields.Producer, - fmtter: tt.fields.fmt, - logger: NoopLogger{}, - tracer: noop.TracerProvider{}.Tracer(""), - p: propagation.TraceContext{}, + producer: tt.fields.Producer, + formatter: tt.fields.fmt, + logger: NoopLogger{}, + tracer: noop.TracerProvider{}.Tracer(""), + p: propagation.TraceContext{}, } got, err := w.Write(tt.args.ctx, tt.args.value) if tt.wantErr { @@ -164,7 +164,7 @@ func TestWriter_WriteKey(t *testing.T) { w := &KWriter{ producer: tt.fields.Producer, topicConfig: tt.fields.conf, - fmtter: zfmtShim{tt.fields.fmt}, + formatter: zfmtShim{tt.fields.fmt}, isClosed: tt.fields.isClosed, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), @@ -202,7 +202,7 @@ func TestWriter_WriteKeyReturnsImmediateError(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{}, isClosed: false, - fmtter: zfmtShim{&zfmt.JSONFormatter{}}, + formatter: zfmtShim{&zfmt.JSONFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -240,7 +240,7 @@ func 
TestWriter_WritesMetrics(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: zfmtShim{&zfmt.StringFormatter{}}, + formatter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -303,11 +303,11 @@ func TestWriter_WriteSpecialCase(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { w := &KWriter{ - producer: tt.fields.Producer, - fmtter: zfmtShim{tt.fields.fmt}, - logger: NoopLogger{}, - tracer: noop.TracerProvider{}.Tracer(""), - p: propagation.TraceContext{}, + producer: tt.fields.Producer, + formatter: zfmtShim{tt.fields.fmt}, + logger: NoopLogger{}, + tracer: noop.TracerProvider{}.Tracer(""), + p: propagation.TraceContext{}, } got, err := w.Write(tt.args.ctx, tt.args.value) if tt.wantErr { @@ -348,7 +348,7 @@ func TestWriter_PreWriteLifecycleHookCanAugmentHeaders(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: zfmtShim{&zfmt.StringFormatter{}}, + formatter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -377,7 +377,7 @@ func TestWriter_WithHeadersWriteOptionCanAugmentHeaders(t *testing.T) { wr := &KWriter{ producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, - fmtter: zfmtShim{&zfmt.StringFormatter{}}, + formatter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -435,7 +435,7 @@ func TestWriter_PreWriteLifecycleHookErrorDoesntHaltProcessing(t *testing.T) { producer: p, topicConfig: ProducerTopicConfig{Topic: "orange"}, lifecycle: hooks, - fmtter: zfmtShim{&zfmt.StringFormatter{}}, + formatter: zfmtShim{&zfmt.StringFormatter{}}, logger: NoopLogger{}, tracer: noop.TracerProvider{}.Tracer(""), p: propagation.TraceContext{}, @@ -566,7 +566,7 @@ func Test_newWriter(t *testing.T) { func TestWriter_WithOptions(t *testing.T) { recoverThenFail(t) w := &KWriter{} - require.Nil(t, w.fmtter, "expected nil formatter") + require.Nil(t, w.formatter, "expected nil formatter") settings := WriterSettings{} WFormatterOption(&zfmt.StringFormatter{})(&settings) From 7f115059c8ed32a85dd2bdf82a5fb523888b7668 Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Mon, 23 Sep 2024 09:41:38 -0700 Subject: [PATCH 33/34] Executed code cleanup --- client_test.go | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/client_test.go b/client_test.go index 98a262f..6c67584 100644 --- a/client_test.go +++ b/client_test.go @@ -180,7 +180,7 @@ func TestClient_Reader(t *testing.T) { fields: fields{ conf: Config{BootstrapServers: []string{"localhost:9092"}}, readers: map[string]*KReader{ - "test-config": &KReader{isClosed: true}, + "test-config": {isClosed: true}, }, consumerProvider: mockConfluentConsumerProvider{c: MockKafkaConsumer{ID: "stew"}}.NewConsumer, logger: NoopLogger{}, @@ -213,7 +213,7 @@ func TestClient_Reader(t *testing.T) { fields: fields{ conf: Config{BootstrapServers: []string{"localhost:9092"}}, readers: map[string]*KReader{ - "test-config": &KReader{isClosed: true}, + "test-config": {isClosed: true}, }, consumerProvider: mockConfluentConsumerProvider{c: MockKafkaConsumer{ID: "stew"}}.NewConsumer, logger: NoopLogger{}, @@ -253,7 +253,7 @@ func TestClient_Reader(t *testing.T) { name: "get from cache", fields: fields{ readers: map[string]*KReader{ - "test-config": &KReader{}, + 
"test-config": {}, }, }, args: args{ @@ -346,7 +346,7 @@ func TestClient_Writer(t *testing.T) { fields: fields{ conf: Config{BootstrapServers: []string{"localhost:9092"}}, writers: map[string]*KWriter{ - "test-id": &KWriter{isClosed: true}, + "test-id": {isClosed: true}, }, producerProvider: mockConfluentProducerProvider{}.NewProducer, logger: NoopLogger{}, @@ -376,7 +376,7 @@ func TestClient_Writer(t *testing.T) { fields: fields{ conf: Config{BootstrapServers: []string{"localhost:9092"}}, writers: map[string]*KWriter{ - "test-id": &KWriter{isClosed: true}, + "test-id": {isClosed: true}, }, producerProvider: mockConfluentProducerProvider{}.NewProducer, logger: NoopLogger{}, @@ -412,7 +412,7 @@ func TestClient_Writer(t *testing.T) { name: "get from cache", fields: fields{ writers: map[string]*KWriter{ - "test-id-topic": &KWriter{}, + "test-id-topic": {}, }, }, args: args{ @@ -502,8 +502,8 @@ func TestClient_Close(t *testing.T) { "r2": r2, }, writers: map[string]*KWriter{ - "w1": &KWriter{producer: p}, - "w2": &KWriter{producer: p}, + "w1": {producer: p}, + "w2": {producer: p}, }, }, }, From f0923ca6638596ea80bba332f37fc6ed95292cba Mon Sep 17 00:00:00 2001 From: stewartboyd119 Date: Mon, 23 Sep 2024 10:07:00 -0700 Subject: [PATCH 34/34] Corrected some typos --- .gitignore | 1 - changelog.md | 2 +- example/worker_avro/main.go | 3 --- formatter.go | 4 ++-- heap_test.go | 2 +- lifecycle.go | 2 +- test/integration_test.go | 1 + testhelper.go | 4 ++-- work.go | 2 +- 9 files changed, 9 insertions(+), 12 deletions(-) diff --git a/.gitignore b/.gitignore index 6b401df..986e774 100644 --- a/.gitignore +++ b/.gitignore @@ -25,7 +25,6 @@ go.work.sum .idea/ .run/ zk-multiple-kafka-multiple/ -*.out *.res *.lsif *.prof diff --git a/changelog.md b/changelog.md index e1d69eb..7b62247 100644 --- a/changelog.md +++ b/changelog.md @@ -6,7 +6,7 @@ This project adheres to Semantic Versioning. ## 1.1.0 (Sep 22, 2024) -1. Added support for schemaregistry (avro, proto, json). Extended `zfmt.FormatterType` types to include `avro_schema_registry`, `proto_schema_registry` and `json_schema_registry` +1. Added support for schema registry (avro, proto, json). Extended `zfmt.FormatterType` types to include `avro_schema_registry`, `proto_schema_registry` and `json_schema_registry` 2. Added lifecycle function `LifecyclePostReadImmediate` 3. Added `workFactory.CreateWithFunc` which is a convenience work factory method for creating work using a callback instead of an interface (can reduce boilerplate) in some scenarios. 4. 
During the creation of readers/writers an error is now returned if bootstrap servers is empty diff --git a/example/worker_avro/main.go b/example/worker_avro/main.go index 8547625..66c185e 100644 --- a/example/worker_avro/main.go +++ b/example/worker_avro/main.go @@ -13,9 +13,6 @@ import ( "github.com/zillow/zkafka" ) -//go:embed dummy_event.avsc -var dummyEventSchema string - // Demonstrates reading from a topic via the zkafka.Work struct which is more convenient, typically, than using the consumer directly func main() { ctx := context.Background() diff --git a/formatter.go b/formatter.go index ce77959..a8b55d2 100644 --- a/formatter.go +++ b/formatter.go @@ -82,12 +82,12 @@ func (f zfmtShim) unmarshal(req unmarshReq) error { type errFormatter struct{} // marshall returns error with reminder -func (f errFormatter) marshall(req marshReq) ([]byte, error) { +func (f errFormatter) marshall(_ marshReq) ([]byte, error) { return nil, errMissingFormatter } // unmarshal returns error with reminder -func (f errFormatter) unmarshal(req unmarshReq) error { +func (f errFormatter) unmarshal(_ unmarshReq) error { return errMissingFormatter } diff --git a/heap_test.go b/heap_test.go index 2149110..36d7313 100644 --- a/heap_test.go +++ b/heap_test.go @@ -106,7 +106,7 @@ func Test_offsetHeap_SeekPop_DoesntImpactHeapOrdering(t *testing.T) { } got := heap.Pop() want := offsets[i] - require.Equal(t, want, got, "Expect pop to still pop minumums even after seek pops") + require.Equal(t, want, got, "Expect pop to still pop minimums even after seek pops") i++ } } diff --git a/lifecycle.go b/lifecycle.go index 86156e2..44bd69e 100644 --- a/lifecycle.go +++ b/lifecycle.go @@ -57,7 +57,7 @@ type LifecycleHooks struct { // Called by work after reading a message (guaranteed non nil), offers the ability to customize the context object (resulting context object passed to work processor) PostRead func(ctx context.Context, meta LifecyclePostReadMeta) (context.Context, error) - // Called by work immediatedly after an attempt to read a message. Msg might be nil, if there was an error + // Called by work immediately after an attempt to read a message. Msg might be nil, if there was an error // or no available messages. 
PostReadImmediate func(ctx context.Context, meta LifecyclePostReadImmediateMeta) diff --git a/test/integration_test.go b/test/integration_test.go index c591de9..40552f4 100644 --- a/test/integration_test.go +++ b/test/integration_test.go @@ -918,6 +918,7 @@ func Test_KafkaClientsCanWriteToTheirDeadLetterTopic(t *testing.T) { Topic: topic, Formatter: zfmt.JSONFmt, }) + require.NoError(t, err) consumerTopicConfig := zkafka.ConsumerTopicConfig{ ClientID: fmt.Sprintf("worker-%s-%s", t.Name(), uuid.NewString()), diff --git a/testhelper.go b/testhelper.go index c2a2040..a65e6b4 100644 --- a/testhelper.go +++ b/testhelper.go @@ -100,11 +100,11 @@ type FakeClient struct { W Writer } -func (f FakeClient) Reader(ctx context.Context, topicConfig ConsumerTopicConfig, opts ...ReaderOption) (Reader, error) { +func (f FakeClient) Reader(_ context.Context, _ ConsumerTopicConfig, _ ...ReaderOption) (Reader, error) { return f.R, nil } -func (f FakeClient) Writer(ctx context.Context, topicConfig ProducerTopicConfig, opts ...WriterOption) (Writer, error) { +func (f FakeClient) Writer(_ context.Context, _ ProducerTopicConfig, _ ...WriterOption) (Writer, error) { return f.W, nil } diff --git a/work.go b/work.go index ded5107..cf08523 100644 --- a/work.go +++ b/work.go @@ -172,7 +172,7 @@ func (w *Work) execProcessors(ctx context.Context, shutdown <-chan struct{}) { // initiateProcessors creates a buffered channel for each virtual partition, of size poolSize. That way // a particular virtual partition never blocks because of its own capacity issue (and instead the goroutinepool is used -// to limit indefinte growth of processing goroutines). +// to limit indefinite growth of processing goroutines). func (w *Work) initiateProcessors(_ context.Context) { poolSize := w.getPoolSize() w.virtualPartitions = make([]chan workUnit, poolSize)
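
One behavioral note to close out the series: per changelog item 4 and the config.go changes above, reader/writer construction now fails fast with a clear error when no bootstrap servers are configured. A minimal sketch of that failure mode (client ID and topic are placeholders; the error text comes from makeProducerConfig):

```go
package main

import (
	"context"
	"fmt"

	"github.com/zillow/zkafka"
)

func main() {
	// No BootstrapServers in either the client config or the topic config.
	client := zkafka.NewClient(zkafka.Config{})
	defer func() { _ = client.Close() }()

	_, err := client.Writer(context.Background(), zkafka.ProducerTopicConfig{
		ClientID: "example",
		Topic:    "example-topic",
	})
	// Prints: "invalid producer config, missing bootstrap server addresses"
	fmt.Println(err)
}
```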