From dc5c27025c225ccfe7a913567b2dbd17376f10b6 Mon Sep 17 00:00:00 2001
From: Aleksandr Razumov
Date: Sun, 19 Jun 2022 01:01:36 +0300
Subject: [PATCH] feat(proto): total Date, DateTime refactor

---
 block_fuzz_test.go | 4 +-
 cht/cht_test.go | 4 +-
 example_test.go | 8 +-
 internal/cmd/ch-insert-lag/main.go | 17 ++--
 otel_test.go | 2 +-
 .../ch-gen-col/{infer.tpl => infer.go.tmpl} | 0
 proto/cmd/ch-gen-col/main.go | 53 +++++++----
 .../cmd/ch-gen-col/{main.tpl => main.go.tmpl} | 22 +++--
 .../cmd/ch-gen-col/{safe.tpl => safe.go.tmpl} | 35 ++++---
 .../cmd/ch-gen-col/{test.tpl => test.go.tmpl} | 18 ++++
 .../ch-gen-col/{unsafe.tpl => unsafe.go.tmpl} | 18 +++-
 proto/col_auto.go | 2 +-
 proto/col_date.go | 39 ++++++++
 proto/col_date32.go | 39 ++++++++
 proto/col_date32_gen.go | 44 +--------
 proto/col_date32_gen_test.go | 42 +--------
 proto/col_date32_safe_gen.go | 9 +-
 proto/col_date32_unsafe_gen.go | 7 +-
 proto/col_date_gen.go | 44 +--------
 proto/col_date_gen_test.go | 42 +--------
 proto/col_date_safe_gen.go | 9 +-
 proto/col_date_unsafe_gen.go | 7 +-
 proto/col_datetime.go | 93 +++++++++++++++++++
 proto/col_datetime64.go | 71 ++++++++++++++
 proto/col_datetime64_auto.go | 47 ----------
 proto/col_datetime64_gen.go | 66 -------------
 proto/col_datetime64_gen_test.go | 46 +--------
 proto/col_datetime64_safe_gen.go | 13 +--
 proto/col_datetime64_unsafe_gen.go | 11 ++-
 proto/col_datetime_gen.go | 66 -------------
 proto/col_datetime_gen_test.go | 46 +--------
 proto/col_datetime_safe_gen.go | 13 +--
 proto/col_datetime_unsafe_gen.go | 11 ++-
 proto/col_decimal128_gen.go | 20 ++--
 proto/col_decimal128_gen_test.go | 3 +-
 proto/col_decimal128_safe_gen.go | 9 +-
 proto/col_decimal128_unsafe_gen.go | 7 +-
 proto/col_decimal256_gen.go | 20 ++--
 proto/col_decimal256_gen_test.go | 3 +-
 proto/col_decimal256_safe_gen.go | 9 +-
 proto/col_decimal256_unsafe_gen.go | 7 +-
 proto/col_decimal32_gen.go | 20 ++--
 proto/col_decimal32_gen_test.go | 3 +-
 proto/col_decimal32_safe_gen.go | 9 +-
 proto/col_decimal32_unsafe_gen.go | 7 +-
 proto/col_decimal64_gen.go | 20 ++--
 proto/col_decimal64_gen_test.go | 3 +-
 proto/col_decimal64_safe_gen.go | 9 +-
 proto/col_decimal64_unsafe_gen.go | 7 +-
 proto/col_enum16_gen.go | 20 ++--
 proto/col_enum16_gen_test.go | 3 +-
 proto/col_enum16_safe_gen.go | 9 +-
 proto/col_enum16_unsafe_gen.go | 7 +-
 proto/col_enum8_gen.go | 20 ++--
 proto/col_enum8_gen_test.go | 3 +-
 proto/col_enum8_safe_gen.go | 9 +-
 proto/col_enum8_unsafe_gen.go | 7 +-
 proto/col_float32_gen.go | 20 ++--
 proto/col_float32_gen_test.go | 3 +-
 proto/col_float32_safe_gen.go | 9 +-
 proto/col_float32_unsafe_gen.go | 7 +-
 proto/col_float64_gen.go | 20 ++--
 proto/col_float64_gen_test.go | 3 +-
 proto/col_float64_safe_gen.go | 9 +-
 proto/col_float64_unsafe_gen.go | 7 +-
 proto/col_int128_gen.go | 20 ++--
 proto/col_int128_gen_test.go | 3 +-
 proto/col_int128_safe_gen.go | 9 +-
 proto/col_int128_unsafe_gen.go | 7 +-
 proto/col_int16_gen.go | 20 ++--
 proto/col_int16_gen_test.go | 3 +-
 proto/col_int16_safe_gen.go | 9 +-
 proto/col_int16_unsafe_gen.go | 7 +-
 proto/col_int256_gen.go | 20 ++--
 proto/col_int256_gen_test.go | 3 +-
 proto/col_int256_safe_gen.go | 9 +-
 proto/col_int256_unsafe_gen.go | 7 +-
 proto/col_int32_gen.go | 20 ++--
 proto/col_int32_gen_test.go | 3 +-
 proto/col_int32_safe_gen.go | 9 +-
 proto/col_int32_unsafe_gen.go | 7 +-
 proto/col_int64_gen.go | 20 ++--
 proto/col_int64_gen_test.go | 3 +-
 proto/col_int64_safe_gen.go | 9 +-
 proto/col_int64_unsafe_gen.go | 7 +-
 proto/col_int8_gen.go | 20 ++--
 proto/col_int8_gen_test.go | 3 +-
 proto/col_int8_safe_gen.go | 9 +-
 proto/col_int8_unsafe_gen.go | 7 +-
 proto/col_ipv4_gen.go | 20 ++--
 proto/col_ipv4_gen_test.go | 3 +-
 proto/col_ipv4_safe_gen.go | 9 +-
 proto/col_ipv4_unsafe_gen.go | 7 +-
 proto/col_ipv6_gen.go | 20 ++--
 proto/col_ipv6_gen_test.go | 3 +-
 proto/col_ipv6_safe_gen.go | 9 +-
 proto/col_ipv6_unsafe_gen.go | 7 +-
 proto/col_uint128_gen.go | 20 ++--
 proto/col_uint128_gen_test.go | 3 +-
 proto/col_uint128_safe_gen.go | 9 +-
 proto/col_uint128_unsafe_gen.go | 7 +-
 proto/col_uint16_gen.go | 20 ++--
 proto/col_uint16_gen_test.go | 3 +-
 proto/col_uint16_safe_gen.go | 9 +-
 proto/col_uint16_unsafe_gen.go | 7 +-
 proto/col_uint256_gen.go | 20 ++--
 proto/col_uint256_gen_test.go | 3 +-
 proto/col_uint256_safe_gen.go | 9 +-
 proto/col_uint256_unsafe_gen.go | 7 +-
 proto/col_uint32_gen.go | 20 ++--
 proto/col_uint32_gen_test.go | 3 +-
 proto/col_uint32_safe_gen.go | 9 +-
 proto/col_uint32_unsafe_gen.go | 7 +-
 proto/col_uint64_gen.go | 20 ++--
 proto/col_uint64_gen_test.go | 3 +-
 proto/col_uint64_safe_gen.go | 9 +-
 proto/col_uint64_unsafe_gen.go | 7 +-
 proto/col_uint8_gen.go | 20 ++--
 proto/col_uint8_gen_test.go | 3 +-
 proto/col_uint8_safe_gen.go | 5 +-
 proto/datetime64_test.go | 7 --
 proto/profile_events.go | 2 +-
 proto/server_log.go | 2 +-
 query_test.go | 27 ++----
 124 files changed, 896 insertions(+), 937 deletions(-)
 rename proto/cmd/ch-gen-col/{infer.tpl => infer.go.tmpl} (100%)
 rename proto/cmd/ch-gen-col/{main.tpl => main.go.tmpl} (98%)
 rename proto/cmd/ch-gen-col/{safe.tpl => safe.go.tmpl} (78%)
 rename proto/cmd/ch-gen-col/{test.tpl => test.go.tmpl} (88%)
 rename proto/cmd/ch-gen-col/{unsafe.tpl => unsafe.go.tmpl} (74%)
 create mode 100644 proto/col_date.go
 create mode 100644 proto/col_date32.go
 create mode 100644 proto/col_datetime.go
 create mode 100644 proto/col_datetime64.go
 delete mode 100644 proto/col_datetime64_auto.go
 delete mode 100644 proto/col_datetime64_gen.go
 delete mode 100644 proto/col_datetime_gen.go

diff --git a/block_fuzz_test.go b/block_fuzz_test.go
index ae6c58ce..f7accf90 100644
--- a/block_fuzz_test.go
+++ b/block_fuzz_test.go
@@ -188,9 +188,9 @@ func FuzzDecodeBlockAuto(f *testing.F) {
             {}, {100},
         }),
-        proto.ColDateTime64Auto{
+        proto.ColDateTime64{
             Precision: 9,
-            ColDateTime64: proto.ColDateTime64{
+            Data: []proto.DateTime64{
                 1, 2, 3,
             },
         },
diff --git a/cht/cht_test.go b/cht/cht_test.go
index 3cd268c3..a8c12844 100644
--- a/cht/cht_test.go
+++ b/cht/cht_test.go
@@ -270,7 +270,9 @@ ENGINE = Distributed('nexus', default, hits, rand())`,
         {
             Name: "EventDate",
             Data: proto.ColDateTime{
-                proto.ToDateTime(time.Now()),
+                Data: []proto.DateTime{
+                    proto.ToDateTime(time.Now()),
+                },
             },
         },
         {
diff --git a/example_test.go b/example_test.go
index 7053cd1e..5a912689 100644
--- a/example_test.go
+++ b/example_test.go
@@ -10,25 +10,25 @@ import (
 func ExampleQuery_multipleInputColumns() {
     var (
         body      proto.ColStr
-        timestamp proto.ColDateTime64
         name      proto.ColStr
         sevText   proto.ColEnum
         sevNumber proto.ColUInt8
+        ts  = new(proto.ColDateTime64).WithPrecision(proto.PrecisionNano)
         arr = new(proto.ColStr).Array() // Array(String)
         now = time.Date(2010, 1, 1, 10, 22, 33, 345678, time.UTC)
     )
     // Append 10 rows.
for i := 0; i < 10; i++ { body.AppendBytes([]byte("Hello")) - timestamp = append(timestamp, proto.ToDateTime64(now, proto.PrecisionNano)) + ts.Append(now) name.Append("name") sevText.Values = append(sevText.Values, "INFO") sevNumber = append(sevNumber, 10) arr.Append([]string{"foo", "bar", "baz"}) } input := proto.Input{ - {Name: "timestamp", Data: timestamp.Wrap(proto.PrecisionNano)}, + {Name: "ts", Data: ts}, {Name: "severity_text", Data: &sevText}, {Name: "severity_number", Data: sevNumber}, {Name: "body", Data: body}, @@ -38,5 +38,5 @@ func ExampleQuery_multipleInputColumns() { fmt.Println(input.Into("logs")) // Output: - // INSERT INTO "logs" ("timestamp","severity_text","severity_number","body","name","arr") VALUES + // INSERT INTO "logs" ("ts","severity_text","severity_number","body","name","arr") VALUES } diff --git a/internal/cmd/ch-insert-lag/main.go b/internal/cmd/ch-insert-lag/main.go index fe443765..bce5e062 100644 --- a/internal/cmd/ch-insert-lag/main.go +++ b/internal/cmd/ch-insert-lag/main.go @@ -38,11 +38,13 @@ func main() { return errors.Wrap(err, "create") } close(ready) - data := make(proto.ColDateTime64, 50_000) + data := proto.ColDateTime64{ + Data: make([]proto.DateTime64, 50_000), + } fill := func() { now := proto.ToDateTime64(time.Now(), precision) - for i := range data { - data[i] = now + for i := range data.Data { + data.Data[i] = now } } fill() @@ -89,15 +91,14 @@ func main() { }); err != nil { return errors.Wrap(err, "select") } - if len(data) == 0 { + if data.Rows() == 0 { continue } - v := data[0] - if v == 0 { + v := data.Row(0) + if v.IsZero() { continue } - latest := v.Time(precision) - lag := time.Since(latest) + lag := time.Since(v) fmt.Println(lag.Round(time.Millisecond)) } return nil diff --git a/otel_test.go b/otel_test.go index a627489c..01794907 100644 --- a/otel_test.go +++ b/otel_test.go @@ -91,7 +91,7 @@ type OTELRow struct { func (t *OTEL) Append(row OTELRow) { t.Body.AppendBytes(row.Body) - t.Timestamp.Append(proto.DateTime64(row.Timestamp)) + t.Timestamp.Append(proto.DateTime64(row.Timestamp).Time(proto.PrecisionNano)) t.SevNumber.Append(row.SeverityNumber) t.SevText.Append(row.SeverityText) diff --git a/proto/cmd/ch-gen-col/infer.tpl b/proto/cmd/ch-gen-col/infer.go.tmpl similarity index 100% rename from proto/cmd/ch-gen-col/infer.tpl rename to proto/cmd/ch-gen-col/infer.go.tmpl diff --git a/proto/cmd/ch-gen-col/main.go b/proto/cmd/ch-gen-col/main.go index c3ecd545..6a21166c 100644 --- a/proto/cmd/ch-gen-col/main.go +++ b/proto/cmd/ch-gen-col/main.go @@ -146,6 +146,27 @@ func (v Variant) ElemLower() string { return strings.ToLower(v.ElemType()) } +func (v Variant) Complex() bool { + return v.Time() +} + +func (v Variant) Time() bool { + switch v.Kind { + case KindDate, KindDateTime: + return true + default: + return false + } +} + +func (v Variant) Date() bool { + return v.Kind == KindDate +} + +func (v Variant) DateTime() bool { + return v.Kind == KindDateTime +} + func (v Variant) ElemType() string { if v.Kind == KindEnum { return fmt.Sprintf("Enum%d", v.Bits) @@ -193,20 +214,18 @@ func (v Variant) ElemType() string { return b.String() } -//go:embed main.tpl -var mainTemplate string - -//go:embed test.tpl -var testTemplate string - -//go:embed infer.tpl -var inferTemplate string - -//go:embed safe.tpl -var safeTemplate string - -//go:embed unsafe.tpl -var unsafeTemplate string +var ( + //go:embed main.go.tmpl + mainTemplate string + //go:embed test.go.tmpl + testTemplate string + //go:embed infer.go.tmpl + inferTemplate string + //go:embed 
safe.go.tmpl + safeTemplate string + //go:embed unsafe.go.tmpl + unsafeTemplate string +) func write(name string, v interface{}, t *template.Template) error { out := new(bytes.Buffer) @@ -322,8 +341,10 @@ func run() error { v.GenerateUnsafe = true } base := "col_" + v.ElemLower() - if err := write(base+"_gen", v, tpl); err != nil { - return errors.Wrap(err, "write") + if !v.DateTime() { + if err := write(base+"_gen", v, tpl); err != nil { + return errors.Wrap(err, "write") + } } if err := write(base+"_safe_gen", v, tplSafe); err != nil { return errors.Wrap(err, "write") diff --git a/proto/cmd/ch-gen-col/main.tpl b/proto/cmd/ch-gen-col/main.go.tmpl similarity index 98% rename from proto/cmd/ch-gen-col/main.tpl rename to proto/cmd/ch-gen-col/main.go.tmpl index 6177d7eb..a8cf66de 100644 --- a/proto/cmd/ch-gen-col/main.tpl +++ b/proto/cmd/ch-gen-col/main.go.tmpl @@ -13,16 +13,22 @@ var ( _ Column = (*{{ .Type }})(nil) ) -// Type returns ColumnType of {{ .Name }}. -func ({{ .Type }}) Type() ColumnType { - return {{ .ColumnType }} -} - // Rows returns count of rows in column. func (c {{ .Type }}) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *{{ .Type }}) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of {{ .Name }}. +func ({{ .Type }}) Type() ColumnType { + return {{ .ColumnType }} +} + +{{ if not .Time }} // Row returns i-th row of column. func (c {{ .Type }}) Row(i int) {{ .ElemType }} { return c[i] @@ -33,10 +39,6 @@ func (c *{{ .Type }}) Append(v {{ .ElemType }}) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *{{ .Type }}) Reset() { - *c = (*c)[:0] -} // LowCardinality returns LowCardinality for {{ .Name }} . func (c *{{ .Type }}) LowCardinality() *ColLowCardinality[{{ .ElemType }}] { @@ -65,3 +67,5 @@ func NewArr{{ .Name }}() *ColArr[{{ .ElemType }}] { Data: new({{ .Type }}), } } + +{{ end }} diff --git a/proto/cmd/ch-gen-col/safe.tpl b/proto/cmd/ch-gen-col/safe.go.tmpl similarity index 78% rename from proto/cmd/ch-gen-col/safe.tpl rename to proto/cmd/ch-gen-col/safe.go.tmpl index e5323b55..6a3e08c9 100644 --- a/proto/cmd/ch-gen-col/safe.tpl +++ b/proto/cmd/ch-gen-col/safe.go.tmpl @@ -40,7 +40,11 @@ func (c *{{ .Type }}) DecodeColumn(r *Reader, rows int) error { } *c = v {{- else }} + {{- if .DateTime }} + v := c.Data + {{- else }} v := *c + {{- end }} // Move bound check out of loop. // // See https://github.com/golang/go/issues/30945. @@ -54,39 +58,48 @@ func (c *{{ .Type }}) DecodeColumn(r *Reader, rows int) error { {{- else }} {{ .BinGet }}(data[i : i+size]), {{- end }} - ) + ) } + {{- if .DateTime }} + c.Data = v + {{- else }} *c = v {{- end }} + {{- end }} return nil } // EncodeColumn encodes {{ .Name }} rows to *Buffer. func (c {{ .Type }}) EncodeColumn(b *Buffer) { - if len(c) == 0 { + {{- if .DateTime }} + v := c.Data + {{- else }} + v := c + {{- end }} + if len(v) == 0 { return } {{- if .Byte }} - b.Buf = append(b.Buf, c...) + b.Buf = append(b.Buf, v...) {{- else if .SingleByte }} start := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, len(c))...) - for i := range c { - b.Buf[i+start] = {{ .UnsignedType }}(c[i]) + b.Buf = append(b.Buf, make([]byte, len(v))...) + for i := range v { + b.Buf[i+start] = {{ .UnsignedType }}(v[i]) } {{- else }} const size = {{ .Bits }} / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) 
+ for _, vv := range v { {{ .BinPut }}( b.Buf[offset : offset+size], {{- if .IsFloat }} - math.{{ .Name }}bits(v), + math.{{ .Name }}bits(vv), {{- else if .Cast }} - {{ .UnsignedType }}(v), + {{ .UnsignedType }}(vv), {{- else }} - v, + vv, {{- end }} ) offset += size diff --git a/proto/cmd/ch-gen-col/test.tpl b/proto/cmd/ch-gen-col/test.go.tmpl similarity index 88% rename from proto/cmd/ch-gen-col/test.tpl rename to proto/cmd/ch-gen-col/test.go.tmpl index 1ae329d4..269e417c 100644 --- a/proto/cmd/ch-gen-col/test.tpl +++ b/proto/cmd/ch-gen-col/test.go.tmpl @@ -18,9 +18,15 @@ func Test{{ .Type }}_DecodeColumn(t *testing.T) { const rows = 50 var data {{ .Type }} for i := 0; i < rows; i++ { + {{- if .DateTime }} + data.Data = append(data.Data, {{ .New }}(i)) + {{- else if .Date }} + data = append(data, {{ .New }}(i)) + {{- else }} v := {{ .New }}(i) data.Append(v) require.Equal(t, v, data.Row(i)) + {{- end }} } var buf Buffer @@ -39,7 +45,9 @@ func Test{{ .Type }}_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + {{ if not .Time }} require.Equal(t, {{ .ColumnType }}, dec.Type()) + {{ end }} }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -63,6 +71,7 @@ func Test{{ .Type }}_DecodeColumn(t *testing.T) { }) } +{{- if not .Time }} func Test{{ .Type }}Array(t *testing.T) { const rows = 50 data := NewArr{{ .Name }}() @@ -98,12 +107,17 @@ func Test{{ .Type }}Array(t *testing.T) { require.ErrorIs(t, dec.DecodeColumn(r, rows), io.ErrUnexpectedEOF) }) } +{{ end }} func Benchmark{{ .Type }}_DecodeColumn(b *testing.B) { const rows = 1_000 var data {{ .Type }} for i := 0; i < rows; i++ { + {{- if .DateTime }} + data.Data = append(data.Data, {{ .New }}(i)) + {{- else -}} data = append(data, {{ .New }}(i)) + {{- end -}} } var buf Buffer @@ -135,7 +149,11 @@ func Benchmark{{ .Type }}_EncodeColumn(b *testing.B) { const rows = 1_000 var data {{ .Type }} for i := 0; i < rows; i++ { + {{- if .DateTime }} + data.Data = append(data.Data, {{ .New }}(i)) + {{- else -}} data = append(data, {{ .New }}(i)) + {{- end -}} } var buf Buffer diff --git a/proto/cmd/ch-gen-col/unsafe.tpl b/proto/cmd/ch-gen-col/unsafe.go.tmpl similarity index 74% rename from proto/cmd/ch-gen-col/unsafe.tpl rename to proto/cmd/ch-gen-col/unsafe.go.tmpl index 78731079..11a7b1cb 100644 --- a/proto/cmd/ch-gen-col/unsafe.tpl +++ b/proto/cmd/ch-gen-col/unsafe.go.tmpl @@ -15,8 +15,13 @@ func (c *{{ .Type }}) DecodeColumn(r *Reader, rows int) error { if rows == 0 { return nil } + {{- if .DateTime }} + c.Data = append(c.Data, make([]{{ .ElemType }}, rows)...) + s := *(*slice)(unsafe.Pointer(&c.Data)) + {{- else }} *c = append(*c, make([]{{ .ElemType }}, rows)...) s := *(*slice)(unsafe.Pointer(c)) + {{- end }} {{- if not .SingleByte }} const size = {{ .Bits }} / 8 s.Len *= size @@ -31,17 +36,22 @@ func (c *{{ .Type }}) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes {{ .Name }} rows to *Buffer. func (c {{ .Type }}) EncodeColumn(b *Buffer) { - if len(c) == 0 { + {{- if .DateTime }} + v := c.Data + {{- else }} + v := c + {{- end }} + if len(v) == 0 { return } offset := len(b.Buf) {{- if .SingleByte }} - b.Buf = append(b.Buf, make([]byte, len(c))...) + b.Buf = append(b.Buf, make([]byte, len(v))...) {{- else }} const size = {{ .Bits }} / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) 
 {{- end }}
-    s := *(*slice)(unsafe.Pointer(&c))
+    s := *(*slice)(unsafe.Pointer(&v))
 {{- if not .SingleByte }}
     s.Len *= size
     s.Cap *= size
diff --git a/proto/col_auto.go b/proto/col_auto.go
index 3f1efb75..cec5ff74 100644
--- a/proto/col_auto.go
+++ b/proto/col_auto.go
@@ -54,7 +54,7 @@ func (c *ColAuto) Infer(t ColumnType) error {
         c.DataType = t
         return nil
     case ColumnTypeDateTime64:
-        v := new(ColDateTime64Auto)
+        v := new(ColDateTime64)
         if err := v.Infer(t); err != nil {
             return errors.Wrap(err, "datetime")
         }
diff --git a/proto/col_date.go b/proto/col_date.go
new file mode 100644
index 00000000..61d79e63
--- /dev/null
+++ b/proto/col_date.go
@@ -0,0 +1,39 @@
+package proto
+
+import "time"
+
+func (c *ColDate) Append(v time.Time) {
+    *c = append(*c, ToDate(v))
+}
+
+func (c ColDate) Row(i int) time.Time {
+    return c[i].Time()
+}
+
+// LowCardinality returns LowCardinality for Date.
+func (c *ColDate) LowCardinality() *ColLowCardinality[time.Time] {
+    return &ColLowCardinality[time.Time]{
+        index: c,
+    }
+}
+
+// Array is helper that creates Array of Date.
+func (c *ColDate) Array() *ColArr[time.Time] {
+    return &ColArr[time.Time]{
+        Data: c,
+    }
+}
+
+// Nullable is helper that creates Nullable(Date).
+func (c *ColDate) Nullable() *ColNullable[time.Time] {
+    return &ColNullable[time.Time]{
+        Values: c,
+    }
+}
+
+// NewArrDate returns new Array(Date).
+func NewArrDate() *ColArr[time.Time] {
+    return &ColArr[time.Time]{
+        Data: new(ColDate),
+    }
+}
diff --git a/proto/col_date32.go b/proto/col_date32.go
new file mode 100644
index 00000000..f47644c5
--- /dev/null
+++ b/proto/col_date32.go
@@ -0,0 +1,39 @@
+package proto
+
+import "time"
+
+func (c *ColDate32) Append(v time.Time) {
+    *c = append(*c, ToDate32(v))
+}
+
+func (c ColDate32) Row(i int) time.Time {
+    return c[i].Time()
+}
+
+// LowCardinality returns LowCardinality for Date32.
+func (c *ColDate32) LowCardinality() *ColLowCardinality[time.Time] {
+    return &ColLowCardinality[time.Time]{
+        index: c,
+    }
+}
+
+// Array is helper that creates Array of Date32.
+func (c *ColDate32) Array() *ColArr[time.Time] {
+    return &ColArr[time.Time]{
+        Data: c,
+    }
+}
+
+// Nullable is helper that creates Nullable(Date32).
+func (c *ColDate32) Nullable() *ColNullable[time.Time] {
+    return &ColNullable[time.Time]{
+        Values: c,
+    }
+}
+
+// NewArrDate32 returns new Array(Date32).
+func NewArrDate32() *ColArr[time.Time] {
+    return &ColArr[time.Time]{
+        Data: new(ColDate32),
+    }
+}
diff --git a/proto/col_date32_gen.go b/proto/col_date32_gen.go
index c582edf4..c2e30046 100644
--- a/proto/col_date32_gen.go
+++ b/proto/col_date32_gen.go
@@ -12,55 +12,17 @@ var (
     _ Column = (*ColDate32)(nil)
 )
 
-// Type returns ColumnType of Date32.
-func (ColDate32) Type() ColumnType {
-    return ColumnTypeDate32
-}
-
 // Rows returns count of rows in column.
 func (c ColDate32) Rows() int {
     return len(c)
 }
 
-// Row returns i-th row of column.
-func (c ColDate32) Row(i int) Date32 {
-    return c[i]
-}
-
-// Append Date32 to column.
-func (c *ColDate32) Append(v Date32) {
-    *c = append(*c, v)
-}
-
 // Reset resets data in row, preserving capacity for efficiency.
 func (c *ColDate32) Reset() {
     *c = (*c)[:0]
 }
 
-// LowCardinality returns LowCardinality for Date32 .
-func (c *ColDate32) LowCardinality() *ColLowCardinality[Date32] {
-    return &ColLowCardinality[Date32]{
-        index: c,
-    }
-}
-
-// Array is helper that creates Array of Date32.
-func (c *ColDate32) Array() *ColArr[Date32] {
-    return &ColArr[Date32]{
-        Data: c,
-    }
-}
-
-// Nullable is helper that creates Nullable(Date32).
-func (c *ColDate32) Nullable() *ColNullable[Date32] { - return &ColNullable[Date32]{ - Values: c, - } -} - -// NewArrDate32 returns new Array(Date32). -func NewArrDate32() *ColArr[Date32] { - return &ColArr[Date32]{ - Data: new(ColDate32), - } +// Type returns ColumnType of Date32. +func (ColDate32) Type() ColumnType { + return ColumnTypeDate32 } diff --git a/proto/col_date32_gen_test.go b/proto/col_date32_gen_test.go index 74853fe4..2ae97dcb 100644 --- a/proto/col_date32_gen_test.go +++ b/proto/col_date32_gen_test.go @@ -17,9 +17,7 @@ func TestColDate32_DecodeColumn(t *testing.T) { const rows = 50 var data ColDate32 for i := 0; i < rows; i++ { - v := Date32(i) - data.Append(v) - require.Equal(t, v, data.Row(i)) + data = append(data, Date32(i)) } var buf Buffer @@ -38,7 +36,7 @@ func TestColDate32_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) - require.Equal(t, ColumnTypeDate32, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -62,42 +60,6 @@ func TestColDate32_DecodeColumn(t *testing.T) { }) } -func TestColDate32Array(t *testing.T) { - const rows = 50 - data := NewArrDate32() - for i := 0; i < rows; i++ { - data.Append([]Date32{ - Date32(i), - Date32(i + 1), - Date32(i + 2), - }) - } - - var buf Buffer - data.EncodeColumn(&buf) - t.Run("Golden", func(t *testing.T) { - gold.Bytes(t, buf.Buf, "col_arr_date32") - }) - t.Run("Ok", func(t *testing.T) { - br := bytes.NewReader(buf.Buf) - r := NewReader(br) - - dec := NewArrDate32() - require.NoError(t, dec.DecodeColumn(r, rows)) - require.Equal(t, data, dec) - require.Equal(t, rows, dec.Rows()) - dec.Reset() - require.Equal(t, 0, dec.Rows()) - require.Equal(t, ColumnTypeDate32.Array(), dec.Type()) - }) - t.Run("ErrUnexpectedEOF", func(t *testing.T) { - r := NewReader(bytes.NewReader(nil)) - - dec := NewArrDate32() - require.ErrorIs(t, dec.DecodeColumn(r, rows), io.ErrUnexpectedEOF) - }) -} - func BenchmarkColDate32_DecodeColumn(b *testing.B) { const rows = 1_000 var data ColDate32 diff --git a/proto/col_date32_safe_gen.go b/proto/col_date32_safe_gen.go index dcbdb629..1233a577 100644 --- a/proto/col_date32_safe_gen.go +++ b/proto/col_date32_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColDate32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Date32 rows to *Buffer. func (c ColDate32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 32 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint32( b.Buf[offset:offset+size], - uint32(v), + uint32(vv), ) offset += size } diff --git a/proto/col_date32_unsafe_gen.go b/proto/col_date32_unsafe_gen.go index 707e3140..21475ab1 100644 --- a/proto/col_date32_unsafe_gen.go +++ b/proto/col_date32_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColDate32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Date32 rows to *Buffer. func (c ColDate32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 32 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) 
+ s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_date_gen.go b/proto/col_date_gen.go index 6b35cbfd..16447af2 100644 --- a/proto/col_date_gen.go +++ b/proto/col_date_gen.go @@ -12,55 +12,17 @@ var ( _ Column = (*ColDate)(nil) ) -// Type returns ColumnType of Date. -func (ColDate) Type() ColumnType { - return ColumnTypeDate -} - // Rows returns count of rows in column. func (c ColDate) Rows() int { return len(c) } -// Row returns i-th row of column. -func (c ColDate) Row(i int) Date { - return c[i] -} - -// Append Date to column. -func (c *ColDate) Append(v Date) { - *c = append(*c, v) -} - // Reset resets data in row, preserving capacity for efficiency. func (c *ColDate) Reset() { *c = (*c)[:0] } -// LowCardinality returns LowCardinality for Date . -func (c *ColDate) LowCardinality() *ColLowCardinality[Date] { - return &ColLowCardinality[Date]{ - index: c, - } -} - -// Array is helper that creates Array of Date. -func (c *ColDate) Array() *ColArr[Date] { - return &ColArr[Date]{ - Data: c, - } -} - -// Nullable is helper that creates Nullable(Date). -func (c *ColDate) Nullable() *ColNullable[Date] { - return &ColNullable[Date]{ - Values: c, - } -} - -// NewArrDate returns new Array(Date). -func NewArrDate() *ColArr[Date] { - return &ColArr[Date]{ - Data: new(ColDate), - } +// Type returns ColumnType of Date. +func (ColDate) Type() ColumnType { + return ColumnTypeDate } diff --git a/proto/col_date_gen_test.go b/proto/col_date_gen_test.go index fb91fa94..d6b9e3e7 100644 --- a/proto/col_date_gen_test.go +++ b/proto/col_date_gen_test.go @@ -17,9 +17,7 @@ func TestColDate_DecodeColumn(t *testing.T) { const rows = 50 var data ColDate for i := 0; i < rows; i++ { - v := Date(i) - data.Append(v) - require.Equal(t, v, data.Row(i)) + data = append(data, Date(i)) } var buf Buffer @@ -38,7 +36,7 @@ func TestColDate_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) - require.Equal(t, ColumnTypeDate, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -62,42 +60,6 @@ func TestColDate_DecodeColumn(t *testing.T) { }) } -func TestColDateArray(t *testing.T) { - const rows = 50 - data := NewArrDate() - for i := 0; i < rows; i++ { - data.Append([]Date{ - Date(i), - Date(i + 1), - Date(i + 2), - }) - } - - var buf Buffer - data.EncodeColumn(&buf) - t.Run("Golden", func(t *testing.T) { - gold.Bytes(t, buf.Buf, "col_arr_date") - }) - t.Run("Ok", func(t *testing.T) { - br := bytes.NewReader(buf.Buf) - r := NewReader(br) - - dec := NewArrDate() - require.NoError(t, dec.DecodeColumn(r, rows)) - require.Equal(t, data, dec) - require.Equal(t, rows, dec.Rows()) - dec.Reset() - require.Equal(t, 0, dec.Rows()) - require.Equal(t, ColumnTypeDate.Array(), dec.Type()) - }) - t.Run("ErrUnexpectedEOF", func(t *testing.T) { - r := NewReader(bytes.NewReader(nil)) - - dec := NewArrDate() - require.ErrorIs(t, dec.DecodeColumn(r, rows), io.ErrUnexpectedEOF) - }) -} - func BenchmarkColDate_DecodeColumn(b *testing.B) { const rows = 1_000 var data ColDate diff --git a/proto/col_date_safe_gen.go b/proto/col_date_safe_gen.go index e4d1e9a4..9dadd596 100644 --- a/proto/col_date_safe_gen.go +++ b/proto/col_date_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColDate) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Date rows to *Buffer. 
 func (c ColDate) EncodeColumn(b *Buffer) {
-    if len(c) == 0 {
+    v := c
+    if len(v) == 0 {
         return
     }
     const size = 16 / 8
     offset := len(b.Buf)
-    b.Buf = append(b.Buf, make([]byte, size*len(c))...)
-    for _, v := range c {
+    b.Buf = append(b.Buf, make([]byte, size*len(v))...)
+    for _, vv := range v {
         binary.LittleEndian.PutUint16(
             b.Buf[offset:offset+size],
-            uint16(v),
+            uint16(vv),
         )
         offset += size
     }
diff --git a/proto/col_date_unsafe_gen.go b/proto/col_date_unsafe_gen.go
index 116ce920..3975805c 100644
--- a/proto/col_date_unsafe_gen.go
+++ b/proto/col_date_unsafe_gen.go
@@ -29,13 +29,14 @@ func (c *ColDate) DecodeColumn(r *Reader, rows int) error {
 
 // EncodeColumn encodes Date rows to *Buffer.
 func (c ColDate) EncodeColumn(b *Buffer) {
-    if len(c) == 0 {
+    v := c
+    if len(v) == 0 {
         return
     }
     offset := len(b.Buf)
     const size = 16 / 8
-    b.Buf = append(b.Buf, make([]byte, size*len(c))...)
-    s := *(*slice)(unsafe.Pointer(&c))
+    b.Buf = append(b.Buf, make([]byte, size*len(v))...)
+    s := *(*slice)(unsafe.Pointer(&v))
     s.Len *= size
     s.Cap *= size
     src := *(*[]byte)(unsafe.Pointer(&s))
diff --git a/proto/col_datetime.go b/proto/col_datetime.go
new file mode 100644
index 00000000..0fd373ad
--- /dev/null
+++ b/proto/col_datetime.go
@@ -0,0 +1,93 @@
+package proto
+
+import (
+    "strings"
+    "time"
+
+    "github.com/go-faster/errors"
+)
+
+var (
+    _ ColumnOf[time.Time] = (*ColDateTime)(nil)
+    _ Inferable           = (*ColDateTime)(nil)
+)
+
+// ColDateTime implements ColumnOf[time.Time].
+type ColDateTime struct {
+    Data     []DateTime
+    Location *time.Location
+}
+
+func (c *ColDateTime) Reset() {
+    c.Data = c.Data[:0]
+}
+
+func (c ColDateTime) Rows() int {
+    return len(c.Data)
+}
+
+func (c ColDateTime) Type() ColumnType {
+    if c.Location == nil {
+        return ColumnTypeDateTime
+    }
+    return ColumnTypeDateTime.With(`'` + c.Location.String() + `'`)
+}
+
+func (c *ColDateTime) Infer(t ColumnType) error {
+    sub := t.Elem()
+    if sub == "" {
+        c.Location = nil
+        return nil
+    }
+    rawLoc := string(sub)
+    rawLoc = strings.Trim(rawLoc, `'`)
+    loc, err := time.LoadLocation(rawLoc)
+    if err != nil {
+        return errors.Wrap(err, "load location")
+    }
+    c.Location = loc
+    return nil
+}
+
+func (c ColDateTime) loc() *time.Location {
+    if c.Location == nil {
+        return time.UTC
+    }
+    return c.Location
+}
+
+func (c ColDateTime) Row(i int) time.Time {
+    return c.Data[i].Time().In(c.loc())
+}
+
+func (c *ColDateTime) Append(v time.Time) {
+    c.Data = append(c.Data, ToDateTime(v.In(c.loc())))
+}
+
+// LowCardinality returns LowCardinality for DateTime.
+func (c *ColDateTime) LowCardinality() *ColLowCardinality[time.Time] {
+    return &ColLowCardinality[time.Time]{
+        index: c,
+    }
+}
+
+// Array is helper that creates Array of DateTime.
+func (c *ColDateTime) Array() *ColArr[time.Time] {
+    return &ColArr[time.Time]{
+        Data: c,
+    }
+}
+
+// Nullable is helper that creates Nullable(DateTime).
+func (c *ColDateTime) Nullable() *ColNullable[time.Time] {
+    return &ColNullable[time.Time]{
+        Values: c,
+    }
+}
+
+// NewArrDateTime returns new Array(DateTime).
+func NewArrDateTime() *ColArr[time.Time] { + return &ColArr[time.Time]{ + Data: &ColDateTime{}, + } +} diff --git a/proto/col_datetime64.go b/proto/col_datetime64.go new file mode 100644 index 00000000..48c87356 --- /dev/null +++ b/proto/col_datetime64.go @@ -0,0 +1,71 @@ +package proto + +import ( + "strconv" + "time" + + "github.com/go-faster/errors" +) + +var ( + _ ColumnOf[time.Time] = (*ColDateTime64)(nil) + _ Inferable = (*ColDateTime64)(nil) + _ Column = (*ColDateTime64)(nil) +) + +// ColDateTime64 implements ColumnOf[time.Time]. +type ColDateTime64 struct { + Data []DateTime64 + Precision Precision + Location *time.Location +} + +func (c *ColDateTime64) WithPrecision(p Precision) *ColDateTime64 { + c.Precision = p + return c +} + +func (c *ColDateTime64) WithLocation(loc *time.Location) *ColDateTime64 { + c.Location = loc + return c +} + +func (c ColDateTime64) Rows() int { + return len(c.Data) +} + +func (c *ColDateTime64) Reset() { + c.Data = c.Data[:0] +} + +func (c ColDateTime64) Type() ColumnType { + sub := ColumnType(strconv.Itoa(int(c.Precision))) + return ColumnTypeDateTime64.Sub(sub) +} + +func (c *ColDateTime64) Infer(t ColumnType) error { + // TODO(ernado): handle (ignore) timezone + pRaw := t.Elem() + n, err := strconv.ParseUint(string(pRaw), 10, 8) + if err != nil { + return errors.Wrap(err, "parse precision") + } + p := Precision(n) + if !p.Valid() { + return errors.Errorf("precision %d is invalid", n) + } + c.Precision = p + return nil +} + +func (c ColDateTime64) Row(i int) time.Time { + loc := time.UTC + if c.Location != nil { + loc = c.Location + } + return c.Data[i].Time(c.Precision).In(loc) +} + +func (c *ColDateTime64) Append(v time.Time) { + c.Data = append(c.Data, ToDateTime64(v, c.Precision)) +} diff --git a/proto/col_datetime64_auto.go b/proto/col_datetime64_auto.go deleted file mode 100644 index fb3654c9..00000000 --- a/proto/col_datetime64_auto.go +++ /dev/null @@ -1,47 +0,0 @@ -package proto - -import ( - "strconv" - "time" - - "github.com/go-faster/errors" -) - -var ( - _ ColumnOf[time.Time] = (*ColDateTime64Auto)(nil) - _ Inferable = (*ColDateTime64Auto)(nil) -) - -// ColDateTime64Auto implements ColumnOf[time.Time]. -type ColDateTime64Auto struct { - ColDateTime64 - Precision Precision -} - -func (c ColDateTime64Auto) Type() ColumnType { - sub := ColumnType(strconv.Itoa(int(c.Precision))) - return ColumnTypeDateTime64.Sub(sub) -} - -func (c *ColDateTime64Auto) Infer(t ColumnType) error { - // TODO(ernado): handle (ignore) timezone - pRaw := t.Elem() - n, err := strconv.ParseUint(string(pRaw), 10, 8) - if err != nil { - return errors.Wrap(err, "parse precision") - } - p := Precision(n) - if !p.Valid() { - return errors.Errorf("precision %d is invalid", n) - } - c.Precision = p - return nil -} - -func (c ColDateTime64Auto) Row(i int) time.Time { - return c.ColDateTime64.Row(i).Time(c.Precision) -} - -func (c ColDateTime64Auto) Append(v time.Time) { - c.ColDateTime64.Append(ToDateTime64(v, c.Precision)) -} diff --git a/proto/col_datetime64_gen.go b/proto/col_datetime64_gen.go deleted file mode 100644 index ea2caa08..00000000 --- a/proto/col_datetime64_gen.go +++ /dev/null @@ -1,66 +0,0 @@ -// Code generated by ./cmd/ch-gen-col, DO NOT EDIT. - -package proto - -// ColDateTime64 represents DateTime64 column. -type ColDateTime64 []DateTime64 - -// Compile-time assertions for ColDateTime64. -var ( - _ ColInput = ColDateTime64{} - _ ColResult = (*ColDateTime64)(nil) - _ Column = (*ColDateTime64)(nil) -) - -// Type returns ColumnType of DateTime64. 
-func (ColDateTime64) Type() ColumnType { - return ColumnTypeDateTime64 -} - -// Rows returns count of rows in column. -func (c ColDateTime64) Rows() int { - return len(c) -} - -// Row returns i-th row of column. -func (c ColDateTime64) Row(i int) DateTime64 { - return c[i] -} - -// Append DateTime64 to column. -func (c *ColDateTime64) Append(v DateTime64) { - *c = append(*c, v) -} - -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColDateTime64) Reset() { - *c = (*c)[:0] -} - -// LowCardinality returns LowCardinality for DateTime64 . -func (c *ColDateTime64) LowCardinality() *ColLowCardinality[DateTime64] { - return &ColLowCardinality[DateTime64]{ - index: c, - } -} - -// Array is helper that creates Array of DateTime64. -func (c *ColDateTime64) Array() *ColArr[DateTime64] { - return &ColArr[DateTime64]{ - Data: c, - } -} - -// Nullable is helper that creates Nullable(DateTime64). -func (c *ColDateTime64) Nullable() *ColNullable[DateTime64] { - return &ColNullable[DateTime64]{ - Values: c, - } -} - -// NewArrDateTime64 returns new Array(DateTime64). -func NewArrDateTime64() *ColArr[DateTime64] { - return &ColArr[DateTime64]{ - Data: new(ColDateTime64), - } -} diff --git a/proto/col_datetime64_gen_test.go b/proto/col_datetime64_gen_test.go index 30e26aac..837f2a34 100644 --- a/proto/col_datetime64_gen_test.go +++ b/proto/col_datetime64_gen_test.go @@ -17,9 +17,7 @@ func TestColDateTime64_DecodeColumn(t *testing.T) { const rows = 50 var data ColDateTime64 for i := 0; i < rows; i++ { - v := DateTime64(i) - data.Append(v) - require.Equal(t, v, data.Row(i)) + data.Data = append(data.Data, DateTime64(i)) } var buf Buffer @@ -38,7 +36,7 @@ func TestColDateTime64_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) - require.Equal(t, ColumnTypeDateTime64, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -62,47 +60,11 @@ func TestColDateTime64_DecodeColumn(t *testing.T) { }) } -func TestColDateTime64Array(t *testing.T) { - const rows = 50 - data := NewArrDateTime64() - for i := 0; i < rows; i++ { - data.Append([]DateTime64{ - DateTime64(i), - DateTime64(i + 1), - DateTime64(i + 2), - }) - } - - var buf Buffer - data.EncodeColumn(&buf) - t.Run("Golden", func(t *testing.T) { - gold.Bytes(t, buf.Buf, "col_arr_datetime64") - }) - t.Run("Ok", func(t *testing.T) { - br := bytes.NewReader(buf.Buf) - r := NewReader(br) - - dec := NewArrDateTime64() - require.NoError(t, dec.DecodeColumn(r, rows)) - require.Equal(t, data, dec) - require.Equal(t, rows, dec.Rows()) - dec.Reset() - require.Equal(t, 0, dec.Rows()) - require.Equal(t, ColumnTypeDateTime64.Array(), dec.Type()) - }) - t.Run("ErrUnexpectedEOF", func(t *testing.T) { - r := NewReader(bytes.NewReader(nil)) - - dec := NewArrDateTime64() - require.ErrorIs(t, dec.DecodeColumn(r, rows), io.ErrUnexpectedEOF) - }) -} - func BenchmarkColDateTime64_DecodeColumn(b *testing.B) { const rows = 1_000 var data ColDateTime64 for i := 0; i < rows; i++ { - data = append(data, DateTime64(i)) + data.Data = append(data.Data, DateTime64(i)) } var buf Buffer @@ -134,7 +96,7 @@ func BenchmarkColDateTime64_EncodeColumn(b *testing.B) { const rows = 1_000 var data ColDateTime64 for i := 0; i < rows; i++ { - data = append(data, DateTime64(i)) + data.Data = append(data.Data, DateTime64(i)) } var buf Buffer diff --git a/proto/col_datetime64_safe_gen.go b/proto/col_datetime64_safe_gen.go index d71a0ee9..121d0485 100644 --- 
a/proto/col_datetime64_safe_gen.go +++ b/proto/col_datetime64_safe_gen.go @@ -22,7 +22,7 @@ func (c *ColDateTime64) DecodeColumn(r *Reader, rows int) error { if err != nil { return errors.Wrap(err, "read") } - v := *c + v := c.Data // Move bound check out of loop. // // See https://github.com/golang/go/issues/30945. @@ -32,22 +32,23 @@ func (c *ColDateTime64) DecodeColumn(r *Reader, rows int) error { DateTime64(binary.LittleEndian.Uint64(data[i:i+size])), ) } - *c = v + c.Data = v return nil } // EncodeColumn encodes DateTime64 rows to *Buffer. func (c ColDateTime64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c.Data + if len(v) == 0 { return } const size = 64 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint64( b.Buf[offset:offset+size], - uint64(v), + uint64(vv), ) offset += size } diff --git a/proto/col_datetime64_unsafe_gen.go b/proto/col_datetime64_unsafe_gen.go index 3c1efadb..4f197fa2 100644 --- a/proto/col_datetime64_unsafe_gen.go +++ b/proto/col_datetime64_unsafe_gen.go @@ -15,8 +15,8 @@ func (c *ColDateTime64) DecodeColumn(r *Reader, rows int) error { if rows == 0 { return nil } - *c = append(*c, make([]DateTime64, rows)...) - s := *(*slice)(unsafe.Pointer(c)) + c.Data = append(c.Data, make([]DateTime64, rows)...) + s := *(*slice)(unsafe.Pointer(&c.Data)) const size = 64 / 8 s.Len *= size s.Cap *= size @@ -29,13 +29,14 @@ func (c *ColDateTime64) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes DateTime64 rows to *Buffer. func (c ColDateTime64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c.Data + if len(v) == 0 { return } offset := len(b.Buf) const size = 64 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_datetime_gen.go b/proto/col_datetime_gen.go deleted file mode 100644 index 5767335a..00000000 --- a/proto/col_datetime_gen.go +++ /dev/null @@ -1,66 +0,0 @@ -// Code generated by ./cmd/ch-gen-col, DO NOT EDIT. - -package proto - -// ColDateTime represents DateTime column. -type ColDateTime []DateTime - -// Compile-time assertions for ColDateTime. -var ( - _ ColInput = ColDateTime{} - _ ColResult = (*ColDateTime)(nil) - _ Column = (*ColDateTime)(nil) -) - -// Type returns ColumnType of DateTime. -func (ColDateTime) Type() ColumnType { - return ColumnTypeDateTime -} - -// Rows returns count of rows in column. -func (c ColDateTime) Rows() int { - return len(c) -} - -// Row returns i-th row of column. -func (c ColDateTime) Row(i int) DateTime { - return c[i] -} - -// Append DateTime to column. -func (c *ColDateTime) Append(v DateTime) { - *c = append(*c, v) -} - -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColDateTime) Reset() { - *c = (*c)[:0] -} - -// LowCardinality returns LowCardinality for DateTime . -func (c *ColDateTime) LowCardinality() *ColLowCardinality[DateTime] { - return &ColLowCardinality[DateTime]{ - index: c, - } -} - -// Array is helper that creates Array of DateTime. -func (c *ColDateTime) Array() *ColArr[DateTime] { - return &ColArr[DateTime]{ - Data: c, - } -} - -// Nullable is helper that creates Nullable(DateTime). 
-func (c *ColDateTime) Nullable() *ColNullable[DateTime] { - return &ColNullable[DateTime]{ - Values: c, - } -} - -// NewArrDateTime returns new Array(DateTime). -func NewArrDateTime() *ColArr[DateTime] { - return &ColArr[DateTime]{ - Data: new(ColDateTime), - } -} diff --git a/proto/col_datetime_gen_test.go b/proto/col_datetime_gen_test.go index 495534c6..1d09af5c 100644 --- a/proto/col_datetime_gen_test.go +++ b/proto/col_datetime_gen_test.go @@ -17,9 +17,7 @@ func TestColDateTime_DecodeColumn(t *testing.T) { const rows = 50 var data ColDateTime for i := 0; i < rows; i++ { - v := DateTime(i) - data.Append(v) - require.Equal(t, v, data.Row(i)) + data.Data = append(data.Data, DateTime(i)) } var buf Buffer @@ -38,7 +36,7 @@ func TestColDateTime_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) - require.Equal(t, ColumnTypeDateTime, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -62,47 +60,11 @@ func TestColDateTime_DecodeColumn(t *testing.T) { }) } -func TestColDateTimeArray(t *testing.T) { - const rows = 50 - data := NewArrDateTime() - for i := 0; i < rows; i++ { - data.Append([]DateTime{ - DateTime(i), - DateTime(i + 1), - DateTime(i + 2), - }) - } - - var buf Buffer - data.EncodeColumn(&buf) - t.Run("Golden", func(t *testing.T) { - gold.Bytes(t, buf.Buf, "col_arr_datetime") - }) - t.Run("Ok", func(t *testing.T) { - br := bytes.NewReader(buf.Buf) - r := NewReader(br) - - dec := NewArrDateTime() - require.NoError(t, dec.DecodeColumn(r, rows)) - require.Equal(t, data, dec) - require.Equal(t, rows, dec.Rows()) - dec.Reset() - require.Equal(t, 0, dec.Rows()) - require.Equal(t, ColumnTypeDateTime.Array(), dec.Type()) - }) - t.Run("ErrUnexpectedEOF", func(t *testing.T) { - r := NewReader(bytes.NewReader(nil)) - - dec := NewArrDateTime() - require.ErrorIs(t, dec.DecodeColumn(r, rows), io.ErrUnexpectedEOF) - }) -} - func BenchmarkColDateTime_DecodeColumn(b *testing.B) { const rows = 1_000 var data ColDateTime for i := 0; i < rows; i++ { - data = append(data, DateTime(i)) + data.Data = append(data.Data, DateTime(i)) } var buf Buffer @@ -134,7 +96,7 @@ func BenchmarkColDateTime_EncodeColumn(b *testing.B) { const rows = 1_000 var data ColDateTime for i := 0; i < rows; i++ { - data = append(data, DateTime(i)) + data.Data = append(data.Data, DateTime(i)) } var buf Buffer diff --git a/proto/col_datetime_safe_gen.go b/proto/col_datetime_safe_gen.go index 7cb97974..8998155a 100644 --- a/proto/col_datetime_safe_gen.go +++ b/proto/col_datetime_safe_gen.go @@ -22,7 +22,7 @@ func (c *ColDateTime) DecodeColumn(r *Reader, rows int) error { if err != nil { return errors.Wrap(err, "read") } - v := *c + v := c.Data // Move bound check out of loop. // // See https://github.com/golang/go/issues/30945. @@ -32,22 +32,23 @@ func (c *ColDateTime) DecodeColumn(r *Reader, rows int) error { DateTime(binary.LittleEndian.Uint32(data[i:i+size])), ) } - *c = v + c.Data = v return nil } // EncodeColumn encodes DateTime rows to *Buffer. func (c ColDateTime) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c.Data + if len(v) == 0 { return } const size = 32 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) 
+ for _, vv := range v { binary.LittleEndian.PutUint32( b.Buf[offset:offset+size], - uint32(v), + uint32(vv), ) offset += size } diff --git a/proto/col_datetime_unsafe_gen.go b/proto/col_datetime_unsafe_gen.go index 20ebef70..9e8e34c7 100644 --- a/proto/col_datetime_unsafe_gen.go +++ b/proto/col_datetime_unsafe_gen.go @@ -15,8 +15,8 @@ func (c *ColDateTime) DecodeColumn(r *Reader, rows int) error { if rows == 0 { return nil } - *c = append(*c, make([]DateTime, rows)...) - s := *(*slice)(unsafe.Pointer(c)) + c.Data = append(c.Data, make([]DateTime, rows)...) + s := *(*slice)(unsafe.Pointer(&c.Data)) const size = 32 / 8 s.Len *= size s.Cap *= size @@ -29,13 +29,14 @@ func (c *ColDateTime) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes DateTime rows to *Buffer. func (c ColDateTime) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c.Data + if len(v) == 0 { return } offset := len(b.Buf) const size = 32 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_decimal128_gen.go b/proto/col_decimal128_gen.go index 571556e1..11828fbd 100644 --- a/proto/col_decimal128_gen.go +++ b/proto/col_decimal128_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColDecimal128)(nil) ) -// Type returns ColumnType of Decimal128. -func (ColDecimal128) Type() ColumnType { - return ColumnTypeDecimal128 -} - // Rows returns count of rows in column. func (c ColDecimal128) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColDecimal128) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Decimal128. +func (ColDecimal128) Type() ColumnType { + return ColumnTypeDecimal128 +} + // Row returns i-th row of column. func (c ColDecimal128) Row(i int) Decimal128 { return c[i] @@ -32,11 +37,6 @@ func (c *ColDecimal128) Append(v Decimal128) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColDecimal128) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Decimal128 . func (c *ColDecimal128) LowCardinality() *ColLowCardinality[Decimal128] { return &ColLowCardinality[Decimal128]{ diff --git a/proto/col_decimal128_gen_test.go b/proto/col_decimal128_gen_test.go index efc7ec2a..5d8cb0e7 100644 --- a/proto/col_decimal128_gen_test.go +++ b/proto/col_decimal128_gen_test.go @@ -38,7 +38,9 @@ func TestColDecimal128_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeDecimal128, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColDecimal128_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColDecimal128Array(t *testing.T) { const rows = 50 data := NewArrDecimal128() diff --git a/proto/col_decimal128_safe_gen.go b/proto/col_decimal128_safe_gen.go index d259ce9f..a693514d 100644 --- a/proto/col_decimal128_safe_gen.go +++ b/proto/col_decimal128_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColDecimal128) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Decimal128 rows to *Buffer. func (c ColDecimal128) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 128 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) 
- for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binPutUInt128( b.Buf[offset:offset+size], - UInt128(v), + UInt128(vv), ) offset += size } diff --git a/proto/col_decimal128_unsafe_gen.go b/proto/col_decimal128_unsafe_gen.go index 215b2600..6f5fdbcb 100644 --- a/proto/col_decimal128_unsafe_gen.go +++ b/proto/col_decimal128_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColDecimal128) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Decimal128 rows to *Buffer. func (c ColDecimal128) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 128 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_decimal256_gen.go b/proto/col_decimal256_gen.go index 9364ae9d..419512d8 100644 --- a/proto/col_decimal256_gen.go +++ b/proto/col_decimal256_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColDecimal256)(nil) ) -// Type returns ColumnType of Decimal256. -func (ColDecimal256) Type() ColumnType { - return ColumnTypeDecimal256 -} - // Rows returns count of rows in column. func (c ColDecimal256) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColDecimal256) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Decimal256. +func (ColDecimal256) Type() ColumnType { + return ColumnTypeDecimal256 +} + // Row returns i-th row of column. func (c ColDecimal256) Row(i int) Decimal256 { return c[i] @@ -32,11 +37,6 @@ func (c *ColDecimal256) Append(v Decimal256) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColDecimal256) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Decimal256 . func (c *ColDecimal256) LowCardinality() *ColLowCardinality[Decimal256] { return &ColLowCardinality[Decimal256]{ diff --git a/proto/col_decimal256_gen_test.go b/proto/col_decimal256_gen_test.go index 05f0dcce..b64240f6 100644 --- a/proto/col_decimal256_gen_test.go +++ b/proto/col_decimal256_gen_test.go @@ -38,7 +38,9 @@ func TestColDecimal256_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeDecimal256, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColDecimal256_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColDecimal256Array(t *testing.T) { const rows = 50 data := NewArrDecimal256() diff --git a/proto/col_decimal256_safe_gen.go b/proto/col_decimal256_safe_gen.go index 481b73e4..bacd5f46 100644 --- a/proto/col_decimal256_safe_gen.go +++ b/proto/col_decimal256_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColDecimal256) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Decimal256 rows to *Buffer. func (c ColDecimal256) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 256 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) 
+ for _, vv := range v { binPutUInt256( b.Buf[offset:offset+size], - UInt256(v), + UInt256(vv), ) offset += size } diff --git a/proto/col_decimal256_unsafe_gen.go b/proto/col_decimal256_unsafe_gen.go index 6c2c105b..88a8cde6 100644 --- a/proto/col_decimal256_unsafe_gen.go +++ b/proto/col_decimal256_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColDecimal256) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Decimal256 rows to *Buffer. func (c ColDecimal256) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 256 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_decimal32_gen.go b/proto/col_decimal32_gen.go index 8323d35a..e1fe67f6 100644 --- a/proto/col_decimal32_gen.go +++ b/proto/col_decimal32_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColDecimal32)(nil) ) -// Type returns ColumnType of Decimal32. -func (ColDecimal32) Type() ColumnType { - return ColumnTypeDecimal32 -} - // Rows returns count of rows in column. func (c ColDecimal32) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColDecimal32) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Decimal32. +func (ColDecimal32) Type() ColumnType { + return ColumnTypeDecimal32 +} + // Row returns i-th row of column. func (c ColDecimal32) Row(i int) Decimal32 { return c[i] @@ -32,11 +37,6 @@ func (c *ColDecimal32) Append(v Decimal32) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColDecimal32) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Decimal32 . func (c *ColDecimal32) LowCardinality() *ColLowCardinality[Decimal32] { return &ColLowCardinality[Decimal32]{ diff --git a/proto/col_decimal32_gen_test.go b/proto/col_decimal32_gen_test.go index dcdf61ad..ed24ebaa 100644 --- a/proto/col_decimal32_gen_test.go +++ b/proto/col_decimal32_gen_test.go @@ -38,7 +38,9 @@ func TestColDecimal32_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeDecimal32, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColDecimal32_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColDecimal32Array(t *testing.T) { const rows = 50 data := NewArrDecimal32() diff --git a/proto/col_decimal32_safe_gen.go b/proto/col_decimal32_safe_gen.go index f72349c3..b26ebefa 100644 --- a/proto/col_decimal32_safe_gen.go +++ b/proto/col_decimal32_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColDecimal32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Decimal32 rows to *Buffer. func (c ColDecimal32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 32 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) 
+ for _, vv := range v { binary.LittleEndian.PutUint32( b.Buf[offset:offset+size], - uint32(v), + uint32(vv), ) offset += size } diff --git a/proto/col_decimal32_unsafe_gen.go b/proto/col_decimal32_unsafe_gen.go index e5c80347..2f2b0095 100644 --- a/proto/col_decimal32_unsafe_gen.go +++ b/proto/col_decimal32_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColDecimal32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Decimal32 rows to *Buffer. func (c ColDecimal32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 32 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_decimal64_gen.go b/proto/col_decimal64_gen.go index c697c79c..fc26f848 100644 --- a/proto/col_decimal64_gen.go +++ b/proto/col_decimal64_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColDecimal64)(nil) ) -// Type returns ColumnType of Decimal64. -func (ColDecimal64) Type() ColumnType { - return ColumnTypeDecimal64 -} - // Rows returns count of rows in column. func (c ColDecimal64) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColDecimal64) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Decimal64. +func (ColDecimal64) Type() ColumnType { + return ColumnTypeDecimal64 +} + // Row returns i-th row of column. func (c ColDecimal64) Row(i int) Decimal64 { return c[i] @@ -32,11 +37,6 @@ func (c *ColDecimal64) Append(v Decimal64) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColDecimal64) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Decimal64 . func (c *ColDecimal64) LowCardinality() *ColLowCardinality[Decimal64] { return &ColLowCardinality[Decimal64]{ diff --git a/proto/col_decimal64_gen_test.go b/proto/col_decimal64_gen_test.go index 0bfb6e13..3e1986eb 100644 --- a/proto/col_decimal64_gen_test.go +++ b/proto/col_decimal64_gen_test.go @@ -38,7 +38,9 @@ func TestColDecimal64_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeDecimal64, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColDecimal64_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColDecimal64Array(t *testing.T) { const rows = 50 data := NewArrDecimal64() diff --git a/proto/col_decimal64_safe_gen.go b/proto/col_decimal64_safe_gen.go index aaa806f9..550c3ed2 100644 --- a/proto/col_decimal64_safe_gen.go +++ b/proto/col_decimal64_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColDecimal64) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Decimal64 rows to *Buffer. func (c ColDecimal64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 64 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) 
+ for _, vv := range v { binary.LittleEndian.PutUint64( b.Buf[offset:offset+size], - uint64(v), + uint64(vv), ) offset += size } diff --git a/proto/col_decimal64_unsafe_gen.go b/proto/col_decimal64_unsafe_gen.go index 8a12e2c8..7e5b9a4e 100644 --- a/proto/col_decimal64_unsafe_gen.go +++ b/proto/col_decimal64_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColDecimal64) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Decimal64 rows to *Buffer. func (c ColDecimal64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 64 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_enum16_gen.go b/proto/col_enum16_gen.go index 320534b7..3fc2f068 100644 --- a/proto/col_enum16_gen.go +++ b/proto/col_enum16_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColEnum16)(nil) ) -// Type returns ColumnType of Enum16. -func (ColEnum16) Type() ColumnType { - return ColumnTypeEnum16 -} - // Rows returns count of rows in column. func (c ColEnum16) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColEnum16) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Enum16. +func (ColEnum16) Type() ColumnType { + return ColumnTypeEnum16 +} + // Row returns i-th row of column. func (c ColEnum16) Row(i int) Enum16 { return c[i] @@ -32,11 +37,6 @@ func (c *ColEnum16) Append(v Enum16) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColEnum16) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Enum16 . func (c *ColEnum16) LowCardinality() *ColLowCardinality[Enum16] { return &ColLowCardinality[Enum16]{ diff --git a/proto/col_enum16_gen_test.go b/proto/col_enum16_gen_test.go index 327f8883..dc336a81 100644 --- a/proto/col_enum16_gen_test.go +++ b/proto/col_enum16_gen_test.go @@ -38,7 +38,9 @@ func TestColEnum16_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeEnum16, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColEnum16_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColEnum16Array(t *testing.T) { const rows = 50 data := NewArrEnum16() diff --git a/proto/col_enum16_safe_gen.go b/proto/col_enum16_safe_gen.go index ed8cb6a3..b192951b 100644 --- a/proto/col_enum16_safe_gen.go +++ b/proto/col_enum16_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColEnum16) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Enum16 rows to *Buffer. func (c ColEnum16) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 16 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) 
+ for _, vv := range v { binary.LittleEndian.PutUint16( b.Buf[offset:offset+size], - uint16(v), + uint16(vv), ) offset += size } diff --git a/proto/col_enum16_unsafe_gen.go b/proto/col_enum16_unsafe_gen.go index f1188d97..4868d10b 100644 --- a/proto/col_enum16_unsafe_gen.go +++ b/proto/col_enum16_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColEnum16) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Enum16 rows to *Buffer. func (c ColEnum16) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 16 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_enum8_gen.go b/proto/col_enum8_gen.go index b907bc7b..ac190d27 100644 --- a/proto/col_enum8_gen.go +++ b/proto/col_enum8_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColEnum8)(nil) ) -// Type returns ColumnType of Enum8. -func (ColEnum8) Type() ColumnType { - return ColumnTypeEnum8 -} - // Rows returns count of rows in column. func (c ColEnum8) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColEnum8) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Enum8. +func (ColEnum8) Type() ColumnType { + return ColumnTypeEnum8 +} + // Row returns i-th row of column. func (c ColEnum8) Row(i int) Enum8 { return c[i] @@ -32,11 +37,6 @@ func (c *ColEnum8) Append(v Enum8) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColEnum8) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Enum8 . func (c *ColEnum8) LowCardinality() *ColLowCardinality[Enum8] { return &ColLowCardinality[Enum8]{ diff --git a/proto/col_enum8_gen_test.go b/proto/col_enum8_gen_test.go index 5c98937f..7f78550c 100644 --- a/proto/col_enum8_gen_test.go +++ b/proto/col_enum8_gen_test.go @@ -38,7 +38,9 @@ func TestColEnum8_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeEnum8, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColEnum8_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColEnum8Array(t *testing.T) { const rows = 50 data := NewArrEnum8() diff --git a/proto/col_enum8_safe_gen.go b/proto/col_enum8_safe_gen.go index 6c2b71ae..d6f599bb 100644 --- a/proto/col_enum8_safe_gen.go +++ b/proto/col_enum8_safe_gen.go @@ -32,12 +32,13 @@ func (c *ColEnum8) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Enum8 rows to *Buffer. func (c ColEnum8) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } start := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, len(c))...) - for i := range c { - b.Buf[i+start] = uint8(c[i]) + b.Buf = append(b.Buf, make([]byte, len(v))...) + for i := range v { + b.Buf[i+start] = uint8(v[i]) } } diff --git a/proto/col_enum8_unsafe_gen.go b/proto/col_enum8_unsafe_gen.go index 6635c994..c66a3759 100644 --- a/proto/col_enum8_unsafe_gen.go +++ b/proto/col_enum8_unsafe_gen.go @@ -26,12 +26,13 @@ func (c *ColEnum8) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Enum8 rows to *Buffer. 
func (c ColEnum8) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) src := *(*[]byte)(unsafe.Pointer(&s)) dst := b.Buf[offset:] copy(dst, src) diff --git a/proto/col_float32_gen.go b/proto/col_float32_gen.go index c924f0df..377b2f3c 100644 --- a/proto/col_float32_gen.go +++ b/proto/col_float32_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColFloat32)(nil) ) -// Type returns ColumnType of Float32. -func (ColFloat32) Type() ColumnType { - return ColumnTypeFloat32 -} - // Rows returns count of rows in column. func (c ColFloat32) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColFloat32) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Float32. +func (ColFloat32) Type() ColumnType { + return ColumnTypeFloat32 +} + // Row returns i-th row of column. func (c ColFloat32) Row(i int) float32 { return c[i] @@ -32,11 +37,6 @@ func (c *ColFloat32) Append(v float32) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColFloat32) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Float32 . func (c *ColFloat32) LowCardinality() *ColLowCardinality[float32] { return &ColLowCardinality[float32]{ diff --git a/proto/col_float32_gen_test.go b/proto/col_float32_gen_test.go index 8beac461..63819c09 100644 --- a/proto/col_float32_gen_test.go +++ b/proto/col_float32_gen_test.go @@ -38,7 +38,9 @@ func TestColFloat32_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeFloat32, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColFloat32_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColFloat32Array(t *testing.T) { const rows = 50 data := NewArrFloat32() diff --git a/proto/col_float32_safe_gen.go b/proto/col_float32_safe_gen.go index 11b51dd7..32139a1f 100644 --- a/proto/col_float32_safe_gen.go +++ b/proto/col_float32_safe_gen.go @@ -39,16 +39,17 @@ func (c *ColFloat32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Float32 rows to *Buffer. func (c ColFloat32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 32 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint32( b.Buf[offset:offset+size], - math.Float32bits(v), + math.Float32bits(vv), ) offset += size } diff --git a/proto/col_float32_unsafe_gen.go b/proto/col_float32_unsafe_gen.go index 18a48eeb..574284c1 100644 --- a/proto/col_float32_unsafe_gen.go +++ b/proto/col_float32_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColFloat32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Float32 rows to *Buffer. func (c ColFloat32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 32 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) 
+ s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_float64_gen.go b/proto/col_float64_gen.go index 6ddc645b..a733a717 100644 --- a/proto/col_float64_gen.go +++ b/proto/col_float64_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColFloat64)(nil) ) -// Type returns ColumnType of Float64. -func (ColFloat64) Type() ColumnType { - return ColumnTypeFloat64 -} - // Rows returns count of rows in column. func (c ColFloat64) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColFloat64) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Float64. +func (ColFloat64) Type() ColumnType { + return ColumnTypeFloat64 +} + // Row returns i-th row of column. func (c ColFloat64) Row(i int) float64 { return c[i] @@ -32,11 +37,6 @@ func (c *ColFloat64) Append(v float64) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColFloat64) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Float64 . func (c *ColFloat64) LowCardinality() *ColLowCardinality[float64] { return &ColLowCardinality[float64]{ diff --git a/proto/col_float64_gen_test.go b/proto/col_float64_gen_test.go index 36536d1b..9ca03a01 100644 --- a/proto/col_float64_gen_test.go +++ b/proto/col_float64_gen_test.go @@ -38,7 +38,9 @@ func TestColFloat64_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeFloat64, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColFloat64_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColFloat64Array(t *testing.T) { const rows = 50 data := NewArrFloat64() diff --git a/proto/col_float64_safe_gen.go b/proto/col_float64_safe_gen.go index c8706b3e..cdf607ee 100644 --- a/proto/col_float64_safe_gen.go +++ b/proto/col_float64_safe_gen.go @@ -39,16 +39,17 @@ func (c *ColFloat64) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Float64 rows to *Buffer. func (c ColFloat64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 64 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint64( b.Buf[offset:offset+size], - math.Float64bits(v), + math.Float64bits(vv), ) offset += size } diff --git a/proto/col_float64_unsafe_gen.go b/proto/col_float64_unsafe_gen.go index 9252ced1..fae23ab8 100644 --- a/proto/col_float64_unsafe_gen.go +++ b/proto/col_float64_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColFloat64) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Float64 rows to *Buffer. func (c ColFloat64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 64 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_int128_gen.go b/proto/col_int128_gen.go index 5679fba1..34dade40 100644 --- a/proto/col_int128_gen.go +++ b/proto/col_int128_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColInt128)(nil) ) -// Type returns ColumnType of Int128. 
-func (ColInt128) Type() ColumnType { - return ColumnTypeInt128 -} - // Rows returns count of rows in column. func (c ColInt128) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColInt128) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Int128. +func (ColInt128) Type() ColumnType { + return ColumnTypeInt128 +} + // Row returns i-th row of column. func (c ColInt128) Row(i int) Int128 { return c[i] @@ -32,11 +37,6 @@ func (c *ColInt128) Append(v Int128) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColInt128) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Int128 . func (c *ColInt128) LowCardinality() *ColLowCardinality[Int128] { return &ColLowCardinality[Int128]{ diff --git a/proto/col_int128_gen_test.go b/proto/col_int128_gen_test.go index 5eca6a83..c54abf24 100644 --- a/proto/col_int128_gen_test.go +++ b/proto/col_int128_gen_test.go @@ -38,7 +38,9 @@ func TestColInt128_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeInt128, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColInt128_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColInt128Array(t *testing.T) { const rows = 50 data := NewArrInt128() diff --git a/proto/col_int128_safe_gen.go b/proto/col_int128_safe_gen.go index 87c77876..2d786c33 100644 --- a/proto/col_int128_safe_gen.go +++ b/proto/col_int128_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColInt128) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int128 rows to *Buffer. func (c ColInt128) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 128 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binPutUInt128( b.Buf[offset:offset+size], - UInt128(v), + UInt128(vv), ) offset += size } diff --git a/proto/col_int128_unsafe_gen.go b/proto/col_int128_unsafe_gen.go index 95fd818a..28c2d8dc 100644 --- a/proto/col_int128_unsafe_gen.go +++ b/proto/col_int128_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColInt128) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int128 rows to *Buffer. func (c ColInt128) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 128 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_int16_gen.go b/proto/col_int16_gen.go index 4d92dd9f..ec255588 100644 --- a/proto/col_int16_gen.go +++ b/proto/col_int16_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColInt16)(nil) ) -// Type returns ColumnType of Int16. -func (ColInt16) Type() ColumnType { - return ColumnTypeInt16 -} - // Rows returns count of rows in column. func (c ColInt16) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColInt16) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Int16. +func (ColInt16) Type() ColumnType { + return ColumnTypeInt16 +} + // Row returns i-th row of column. 
func (c ColInt16) Row(i int) int16 { return c[i] @@ -32,11 +37,6 @@ func (c *ColInt16) Append(v int16) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColInt16) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Int16 . func (c *ColInt16) LowCardinality() *ColLowCardinality[int16] { return &ColLowCardinality[int16]{ diff --git a/proto/col_int16_gen_test.go b/proto/col_int16_gen_test.go index 5627fa64..d5765695 100644 --- a/proto/col_int16_gen_test.go +++ b/proto/col_int16_gen_test.go @@ -38,7 +38,9 @@ func TestColInt16_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeInt16, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColInt16_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColInt16Array(t *testing.T) { const rows = 50 data := NewArrInt16() diff --git a/proto/col_int16_safe_gen.go b/proto/col_int16_safe_gen.go index 0e801a53..1af147ca 100644 --- a/proto/col_int16_safe_gen.go +++ b/proto/col_int16_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColInt16) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int16 rows to *Buffer. func (c ColInt16) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 16 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint16( b.Buf[offset:offset+size], - uint16(v), + uint16(vv), ) offset += size } diff --git a/proto/col_int16_unsafe_gen.go b/proto/col_int16_unsafe_gen.go index c3caf53e..60411262 100644 --- a/proto/col_int16_unsafe_gen.go +++ b/proto/col_int16_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColInt16) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int16 rows to *Buffer. func (c ColInt16) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 16 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_int256_gen.go b/proto/col_int256_gen.go index 00407b25..2952b31b 100644 --- a/proto/col_int256_gen.go +++ b/proto/col_int256_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColInt256)(nil) ) -// Type returns ColumnType of Int256. -func (ColInt256) Type() ColumnType { - return ColumnTypeInt256 -} - // Rows returns count of rows in column. func (c ColInt256) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColInt256) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Int256. +func (ColInt256) Type() ColumnType { + return ColumnTypeInt256 +} + // Row returns i-th row of column. func (c ColInt256) Row(i int) Int256 { return c[i] @@ -32,11 +37,6 @@ func (c *ColInt256) Append(v Int256) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColInt256) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Int256 . 
func (c *ColInt256) LowCardinality() *ColLowCardinality[Int256] { return &ColLowCardinality[Int256]{ diff --git a/proto/col_int256_gen_test.go b/proto/col_int256_gen_test.go index 15b35ad9..03da581d 100644 --- a/proto/col_int256_gen_test.go +++ b/proto/col_int256_gen_test.go @@ -38,7 +38,9 @@ func TestColInt256_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeInt256, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColInt256_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColInt256Array(t *testing.T) { const rows = 50 data := NewArrInt256() diff --git a/proto/col_int256_safe_gen.go b/proto/col_int256_safe_gen.go index 739c1912..c353d88c 100644 --- a/proto/col_int256_safe_gen.go +++ b/proto/col_int256_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColInt256) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int256 rows to *Buffer. func (c ColInt256) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 256 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binPutUInt256( b.Buf[offset:offset+size], - UInt256(v), + UInt256(vv), ) offset += size } diff --git a/proto/col_int256_unsafe_gen.go b/proto/col_int256_unsafe_gen.go index e0a299d3..fb3bb0f5 100644 --- a/proto/col_int256_unsafe_gen.go +++ b/proto/col_int256_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColInt256) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int256 rows to *Buffer. func (c ColInt256) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 256 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_int32_gen.go b/proto/col_int32_gen.go index 47333e7f..bb08c54a 100644 --- a/proto/col_int32_gen.go +++ b/proto/col_int32_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColInt32)(nil) ) -// Type returns ColumnType of Int32. -func (ColInt32) Type() ColumnType { - return ColumnTypeInt32 -} - // Rows returns count of rows in column. func (c ColInt32) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColInt32) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Int32. +func (ColInt32) Type() ColumnType { + return ColumnTypeInt32 +} + // Row returns i-th row of column. func (c ColInt32) Row(i int) int32 { return c[i] @@ -32,11 +37,6 @@ func (c *ColInt32) Append(v int32) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColInt32) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Int32 . 
func (c *ColInt32) LowCardinality() *ColLowCardinality[int32] { return &ColLowCardinality[int32]{ diff --git a/proto/col_int32_gen_test.go b/proto/col_int32_gen_test.go index 7c7dbf75..30c1e2b0 100644 --- a/proto/col_int32_gen_test.go +++ b/proto/col_int32_gen_test.go @@ -38,7 +38,9 @@ func TestColInt32_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeInt32, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColInt32_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColInt32Array(t *testing.T) { const rows = 50 data := NewArrInt32() diff --git a/proto/col_int32_safe_gen.go b/proto/col_int32_safe_gen.go index 8e9e8270..800d91fc 100644 --- a/proto/col_int32_safe_gen.go +++ b/proto/col_int32_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColInt32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int32 rows to *Buffer. func (c ColInt32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 32 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint32( b.Buf[offset:offset+size], - uint32(v), + uint32(vv), ) offset += size } diff --git a/proto/col_int32_unsafe_gen.go b/proto/col_int32_unsafe_gen.go index b8222c52..e6c5a333 100644 --- a/proto/col_int32_unsafe_gen.go +++ b/proto/col_int32_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColInt32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int32 rows to *Buffer. func (c ColInt32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 32 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_int64_gen.go b/proto/col_int64_gen.go index cc66c796..5cb27af2 100644 --- a/proto/col_int64_gen.go +++ b/proto/col_int64_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColInt64)(nil) ) -// Type returns ColumnType of Int64. -func (ColInt64) Type() ColumnType { - return ColumnTypeInt64 -} - // Rows returns count of rows in column. func (c ColInt64) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColInt64) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Int64. +func (ColInt64) Type() ColumnType { + return ColumnTypeInt64 +} + // Row returns i-th row of column. func (c ColInt64) Row(i int) int64 { return c[i] @@ -32,11 +37,6 @@ func (c *ColInt64) Append(v int64) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColInt64) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Int64 . 
func (c *ColInt64) LowCardinality() *ColLowCardinality[int64] { return &ColLowCardinality[int64]{ diff --git a/proto/col_int64_gen_test.go b/proto/col_int64_gen_test.go index 5c6d370c..4b9753b7 100644 --- a/proto/col_int64_gen_test.go +++ b/proto/col_int64_gen_test.go @@ -38,7 +38,9 @@ func TestColInt64_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeInt64, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColInt64_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColInt64Array(t *testing.T) { const rows = 50 data := NewArrInt64() diff --git a/proto/col_int64_safe_gen.go b/proto/col_int64_safe_gen.go index 9caad874..6f3cd810 100644 --- a/proto/col_int64_safe_gen.go +++ b/proto/col_int64_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColInt64) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int64 rows to *Buffer. func (c ColInt64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 64 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint64( b.Buf[offset:offset+size], - uint64(v), + uint64(vv), ) offset += size } diff --git a/proto/col_int64_unsafe_gen.go b/proto/col_int64_unsafe_gen.go index 47f9216f..2e85b515 100644 --- a/proto/col_int64_unsafe_gen.go +++ b/proto/col_int64_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColInt64) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int64 rows to *Buffer. func (c ColInt64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 64 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_int8_gen.go b/proto/col_int8_gen.go index e6f464ae..9883fc6d 100644 --- a/proto/col_int8_gen.go +++ b/proto/col_int8_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColInt8)(nil) ) -// Type returns ColumnType of Int8. -func (ColInt8) Type() ColumnType { - return ColumnTypeInt8 -} - // Rows returns count of rows in column. func (c ColInt8) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColInt8) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of Int8. +func (ColInt8) Type() ColumnType { + return ColumnTypeInt8 +} + // Row returns i-th row of column. func (c ColInt8) Row(i int) int8 { return c[i] @@ -32,11 +37,6 @@ func (c *ColInt8) Append(v int8) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColInt8) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for Int8 . 
func (c *ColInt8) LowCardinality() *ColLowCardinality[int8] { return &ColLowCardinality[int8]{ diff --git a/proto/col_int8_gen_test.go b/proto/col_int8_gen_test.go index 44708368..0ada0ebe 100644 --- a/proto/col_int8_gen_test.go +++ b/proto/col_int8_gen_test.go @@ -38,7 +38,9 @@ func TestColInt8_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeInt8, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColInt8_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColInt8Array(t *testing.T) { const rows = 50 data := NewArrInt8() diff --git a/proto/col_int8_safe_gen.go b/proto/col_int8_safe_gen.go index 95f004ba..6e8209a4 100644 --- a/proto/col_int8_safe_gen.go +++ b/proto/col_int8_safe_gen.go @@ -32,12 +32,13 @@ func (c *ColInt8) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int8 rows to *Buffer. func (c ColInt8) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } start := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, len(c))...) - for i := range c { - b.Buf[i+start] = uint8(c[i]) + b.Buf = append(b.Buf, make([]byte, len(v))...) + for i := range v { + b.Buf[i+start] = uint8(v[i]) } } diff --git a/proto/col_int8_unsafe_gen.go b/proto/col_int8_unsafe_gen.go index 5c377a26..748de299 100644 --- a/proto/col_int8_unsafe_gen.go +++ b/proto/col_int8_unsafe_gen.go @@ -26,12 +26,13 @@ func (c *ColInt8) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes Int8 rows to *Buffer. func (c ColInt8) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) src := *(*[]byte)(unsafe.Pointer(&s)) dst := b.Buf[offset:] copy(dst, src) diff --git a/proto/col_ipv4_gen.go b/proto/col_ipv4_gen.go index d54e56cc..3c3aef83 100644 --- a/proto/col_ipv4_gen.go +++ b/proto/col_ipv4_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColIPv4)(nil) ) -// Type returns ColumnType of IPv4. -func (ColIPv4) Type() ColumnType { - return ColumnTypeIPv4 -} - // Rows returns count of rows in column. func (c ColIPv4) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColIPv4) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of IPv4. +func (ColIPv4) Type() ColumnType { + return ColumnTypeIPv4 +} + // Row returns i-th row of column. func (c ColIPv4) Row(i int) IPv4 { return c[i] @@ -32,11 +37,6 @@ func (c *ColIPv4) Append(v IPv4) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColIPv4) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for IPv4 . 
func (c *ColIPv4) LowCardinality() *ColLowCardinality[IPv4] { return &ColLowCardinality[IPv4]{ diff --git a/proto/col_ipv4_gen_test.go b/proto/col_ipv4_gen_test.go index 77f7f58a..a94c0930 100644 --- a/proto/col_ipv4_gen_test.go +++ b/proto/col_ipv4_gen_test.go @@ -38,7 +38,9 @@ func TestColIPv4_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeIPv4, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColIPv4_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColIPv4Array(t *testing.T) { const rows = 50 data := NewArrIPv4() diff --git a/proto/col_ipv4_safe_gen.go b/proto/col_ipv4_safe_gen.go index a4fe43d4..b0fbcdc8 100644 --- a/proto/col_ipv4_safe_gen.go +++ b/proto/col_ipv4_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColIPv4) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes IPv4 rows to *Buffer. func (c ColIPv4) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 32 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint32( b.Buf[offset:offset+size], - uint32(v), + uint32(vv), ) offset += size } diff --git a/proto/col_ipv4_unsafe_gen.go b/proto/col_ipv4_unsafe_gen.go index 798653a8..bc505976 100644 --- a/proto/col_ipv4_unsafe_gen.go +++ b/proto/col_ipv4_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColIPv4) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes IPv4 rows to *Buffer. func (c ColIPv4) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 32 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_ipv6_gen.go b/proto/col_ipv6_gen.go index 790c407c..80213e04 100644 --- a/proto/col_ipv6_gen.go +++ b/proto/col_ipv6_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColIPv6)(nil) ) -// Type returns ColumnType of IPv6. -func (ColIPv6) Type() ColumnType { - return ColumnTypeIPv6 -} - // Rows returns count of rows in column. func (c ColIPv6) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColIPv6) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of IPv6. +func (ColIPv6) Type() ColumnType { + return ColumnTypeIPv6 +} + // Row returns i-th row of column. func (c ColIPv6) Row(i int) IPv6 { return c[i] @@ -32,11 +37,6 @@ func (c *ColIPv6) Append(v IPv6) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColIPv6) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for IPv6 . 
func (c *ColIPv6) LowCardinality() *ColLowCardinality[IPv6] { return &ColLowCardinality[IPv6]{ diff --git a/proto/col_ipv6_gen_test.go b/proto/col_ipv6_gen_test.go index 64790d96..abf822ea 100644 --- a/proto/col_ipv6_gen_test.go +++ b/proto/col_ipv6_gen_test.go @@ -38,7 +38,9 @@ func TestColIPv6_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeIPv6, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColIPv6_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColIPv6Array(t *testing.T) { const rows = 50 data := NewArrIPv6() diff --git a/proto/col_ipv6_safe_gen.go b/proto/col_ipv6_safe_gen.go index 3e63fb78..2c011f38 100644 --- a/proto/col_ipv6_safe_gen.go +++ b/proto/col_ipv6_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColIPv6) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes IPv6 rows to *Buffer. func (c ColIPv6) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 128 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binPutIPv6( b.Buf[offset:offset+size], - v, + vv, ) offset += size } diff --git a/proto/col_ipv6_unsafe_gen.go b/proto/col_ipv6_unsafe_gen.go index cbc942d0..55f50f0e 100644 --- a/proto/col_ipv6_unsafe_gen.go +++ b/proto/col_ipv6_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColIPv6) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes IPv6 rows to *Buffer. func (c ColIPv6) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 128 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_uint128_gen.go b/proto/col_uint128_gen.go index f256bef8..f399319f 100644 --- a/proto/col_uint128_gen.go +++ b/proto/col_uint128_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColUInt128)(nil) ) -// Type returns ColumnType of UInt128. -func (ColUInt128) Type() ColumnType { - return ColumnTypeUInt128 -} - // Rows returns count of rows in column. func (c ColUInt128) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColUInt128) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of UInt128. +func (ColUInt128) Type() ColumnType { + return ColumnTypeUInt128 +} + // Row returns i-th row of column. func (c ColUInt128) Row(i int) UInt128 { return c[i] @@ -32,11 +37,6 @@ func (c *ColUInt128) Append(v UInt128) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColUInt128) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for UInt128 . 
func (c *ColUInt128) LowCardinality() *ColLowCardinality[UInt128] { return &ColLowCardinality[UInt128]{ diff --git a/proto/col_uint128_gen_test.go b/proto/col_uint128_gen_test.go index 10e51589..33390e1d 100644 --- a/proto/col_uint128_gen_test.go +++ b/proto/col_uint128_gen_test.go @@ -38,7 +38,9 @@ func TestColUInt128_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeUInt128, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColUInt128_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColUInt128Array(t *testing.T) { const rows = 50 data := NewArrUInt128() diff --git a/proto/col_uint128_safe_gen.go b/proto/col_uint128_safe_gen.go index 83b0fee9..bc427777 100644 --- a/proto/col_uint128_safe_gen.go +++ b/proto/col_uint128_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColUInt128) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt128 rows to *Buffer. func (c ColUInt128) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 128 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binPutUInt128( b.Buf[offset:offset+size], - v, + vv, ) offset += size } diff --git a/proto/col_uint128_unsafe_gen.go b/proto/col_uint128_unsafe_gen.go index 1b9c37cc..4ef24b2a 100644 --- a/proto/col_uint128_unsafe_gen.go +++ b/proto/col_uint128_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColUInt128) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt128 rows to *Buffer. func (c ColUInt128) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 128 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_uint16_gen.go b/proto/col_uint16_gen.go index f6762246..f9d7d4c7 100644 --- a/proto/col_uint16_gen.go +++ b/proto/col_uint16_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColUInt16)(nil) ) -// Type returns ColumnType of UInt16. -func (ColUInt16) Type() ColumnType { - return ColumnTypeUInt16 -} - // Rows returns count of rows in column. func (c ColUInt16) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColUInt16) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of UInt16. +func (ColUInt16) Type() ColumnType { + return ColumnTypeUInt16 +} + // Row returns i-th row of column. func (c ColUInt16) Row(i int) uint16 { return c[i] @@ -32,11 +37,6 @@ func (c *ColUInt16) Append(v uint16) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColUInt16) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for UInt16 . 
func (c *ColUInt16) LowCardinality() *ColLowCardinality[uint16] { return &ColLowCardinality[uint16]{ diff --git a/proto/col_uint16_gen_test.go b/proto/col_uint16_gen_test.go index 29ffe500..323cb8ad 100644 --- a/proto/col_uint16_gen_test.go +++ b/proto/col_uint16_gen_test.go @@ -38,7 +38,9 @@ func TestColUInt16_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeUInt16, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColUInt16_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColUInt16Array(t *testing.T) { const rows = 50 data := NewArrUInt16() diff --git a/proto/col_uint16_safe_gen.go b/proto/col_uint16_safe_gen.go index 74893b6c..54f8ccd6 100644 --- a/proto/col_uint16_safe_gen.go +++ b/proto/col_uint16_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColUInt16) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt16 rows to *Buffer. func (c ColUInt16) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 16 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint16( b.Buf[offset:offset+size], - v, + vv, ) offset += size } diff --git a/proto/col_uint16_unsafe_gen.go b/proto/col_uint16_unsafe_gen.go index 28546a30..6d24308a 100644 --- a/proto/col_uint16_unsafe_gen.go +++ b/proto/col_uint16_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColUInt16) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt16 rows to *Buffer. func (c ColUInt16) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 16 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_uint256_gen.go b/proto/col_uint256_gen.go index eeee3e6c..d3ac257b 100644 --- a/proto/col_uint256_gen.go +++ b/proto/col_uint256_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColUInt256)(nil) ) -// Type returns ColumnType of UInt256. -func (ColUInt256) Type() ColumnType { - return ColumnTypeUInt256 -} - // Rows returns count of rows in column. func (c ColUInt256) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColUInt256) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of UInt256. +func (ColUInt256) Type() ColumnType { + return ColumnTypeUInt256 +} + // Row returns i-th row of column. func (c ColUInt256) Row(i int) UInt256 { return c[i] @@ -32,11 +37,6 @@ func (c *ColUInt256) Append(v UInt256) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColUInt256) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for UInt256 . 
func (c *ColUInt256) LowCardinality() *ColLowCardinality[UInt256] { return &ColLowCardinality[UInt256]{ diff --git a/proto/col_uint256_gen_test.go b/proto/col_uint256_gen_test.go index 97c52777..bf2335fd 100644 --- a/proto/col_uint256_gen_test.go +++ b/proto/col_uint256_gen_test.go @@ -38,7 +38,9 @@ func TestColUInt256_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeUInt256, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColUInt256_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColUInt256Array(t *testing.T) { const rows = 50 data := NewArrUInt256() diff --git a/proto/col_uint256_safe_gen.go b/proto/col_uint256_safe_gen.go index 8e81c5e7..16491bd9 100644 --- a/proto/col_uint256_safe_gen.go +++ b/proto/col_uint256_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColUInt256) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt256 rows to *Buffer. func (c ColUInt256) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 256 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binPutUInt256( b.Buf[offset:offset+size], - v, + vv, ) offset += size } diff --git a/proto/col_uint256_unsafe_gen.go b/proto/col_uint256_unsafe_gen.go index 7cc3ddc0..e78156bb 100644 --- a/proto/col_uint256_unsafe_gen.go +++ b/proto/col_uint256_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColUInt256) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt256 rows to *Buffer. func (c ColUInt256) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 256 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_uint32_gen.go b/proto/col_uint32_gen.go index 35b3376b..384215ef 100644 --- a/proto/col_uint32_gen.go +++ b/proto/col_uint32_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColUInt32)(nil) ) -// Type returns ColumnType of UInt32. -func (ColUInt32) Type() ColumnType { - return ColumnTypeUInt32 -} - // Rows returns count of rows in column. func (c ColUInt32) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColUInt32) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of UInt32. +func (ColUInt32) Type() ColumnType { + return ColumnTypeUInt32 +} + // Row returns i-th row of column. func (c ColUInt32) Row(i int) uint32 { return c[i] @@ -32,11 +37,6 @@ func (c *ColUInt32) Append(v uint32) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColUInt32) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for UInt32 . 
func (c *ColUInt32) LowCardinality() *ColLowCardinality[uint32] { return &ColLowCardinality[uint32]{ diff --git a/proto/col_uint32_gen_test.go b/proto/col_uint32_gen_test.go index 33a20489..178092db 100644 --- a/proto/col_uint32_gen_test.go +++ b/proto/col_uint32_gen_test.go @@ -38,7 +38,9 @@ func TestColUInt32_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeUInt32, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColUInt32_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColUInt32Array(t *testing.T) { const rows = 50 data := NewArrUInt32() diff --git a/proto/col_uint32_safe_gen.go b/proto/col_uint32_safe_gen.go index c642cc06..49b4222c 100644 --- a/proto/col_uint32_safe_gen.go +++ b/proto/col_uint32_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColUInt32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt32 rows to *Buffer. func (c ColUInt32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 32 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint32( b.Buf[offset:offset+size], - v, + vv, ) offset += size } diff --git a/proto/col_uint32_unsafe_gen.go b/proto/col_uint32_unsafe_gen.go index e9d99ac7..be4ab80e 100644 --- a/proto/col_uint32_unsafe_gen.go +++ b/proto/col_uint32_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColUInt32) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt32 rows to *Buffer. func (c ColUInt32) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 32 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_uint64_gen.go b/proto/col_uint64_gen.go index 991d81ef..764ea00c 100644 --- a/proto/col_uint64_gen.go +++ b/proto/col_uint64_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColUInt64)(nil) ) -// Type returns ColumnType of UInt64. -func (ColUInt64) Type() ColumnType { - return ColumnTypeUInt64 -} - // Rows returns count of rows in column. func (c ColUInt64) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColUInt64) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of UInt64. +func (ColUInt64) Type() ColumnType { + return ColumnTypeUInt64 +} + // Row returns i-th row of column. func (c ColUInt64) Row(i int) uint64 { return c[i] @@ -32,11 +37,6 @@ func (c *ColUInt64) Append(v uint64) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColUInt64) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for UInt64 . 
func (c *ColUInt64) LowCardinality() *ColLowCardinality[uint64] { return &ColLowCardinality[uint64]{ diff --git a/proto/col_uint64_gen_test.go b/proto/col_uint64_gen_test.go index c89bb85c..067123e9 100644 --- a/proto/col_uint64_gen_test.go +++ b/proto/col_uint64_gen_test.go @@ -38,7 +38,9 @@ func TestColUInt64_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeUInt64, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColUInt64_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColUInt64Array(t *testing.T) { const rows = 50 data := NewArrUInt64() diff --git a/proto/col_uint64_safe_gen.go b/proto/col_uint64_safe_gen.go index 526f651b..e5ed04b1 100644 --- a/proto/col_uint64_safe_gen.go +++ b/proto/col_uint64_safe_gen.go @@ -38,16 +38,17 @@ func (c *ColUInt64) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt64 rows to *Buffer. func (c ColUInt64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } const size = 64 / 8 offset := len(b.Buf) - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - for _, v := range c { + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + for _, vv := range v { binary.LittleEndian.PutUint64( b.Buf[offset:offset+size], - v, + vv, ) offset += size } diff --git a/proto/col_uint64_unsafe_gen.go b/proto/col_uint64_unsafe_gen.go index d54643a4..0a803dec 100644 --- a/proto/col_uint64_unsafe_gen.go +++ b/proto/col_uint64_unsafe_gen.go @@ -29,13 +29,14 @@ func (c *ColUInt64) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt64 rows to *Buffer. func (c ColUInt64) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } offset := len(b.Buf) const size = 64 / 8 - b.Buf = append(b.Buf, make([]byte, size*len(c))...) - s := *(*slice)(unsafe.Pointer(&c)) + b.Buf = append(b.Buf, make([]byte, size*len(v))...) + s := *(*slice)(unsafe.Pointer(&v)) s.Len *= size s.Cap *= size src := *(*[]byte)(unsafe.Pointer(&s)) diff --git a/proto/col_uint8_gen.go b/proto/col_uint8_gen.go index dba2a1bc..97b06e33 100644 --- a/proto/col_uint8_gen.go +++ b/proto/col_uint8_gen.go @@ -12,16 +12,21 @@ var ( _ Column = (*ColUInt8)(nil) ) -// Type returns ColumnType of UInt8. -func (ColUInt8) Type() ColumnType { - return ColumnTypeUInt8 -} - // Rows returns count of rows in column. func (c ColUInt8) Rows() int { return len(c) } +// Reset resets data in row, preserving capacity for efficiency. +func (c *ColUInt8) Reset() { + *c = (*c)[:0] +} + +// Type returns ColumnType of UInt8. +func (ColUInt8) Type() ColumnType { + return ColumnTypeUInt8 +} + // Row returns i-th row of column. func (c ColUInt8) Row(i int) uint8 { return c[i] @@ -32,11 +37,6 @@ func (c *ColUInt8) Append(v uint8) { *c = append(*c, v) } -// Reset resets data in row, preserving capacity for efficiency. -func (c *ColUInt8) Reset() { - *c = (*c)[:0] -} - // LowCardinality returns LowCardinality for UInt8 . 
func (c *ColUInt8) LowCardinality() *ColLowCardinality[uint8] { return &ColLowCardinality[uint8]{ diff --git a/proto/col_uint8_gen_test.go b/proto/col_uint8_gen_test.go index 4e1902c7..eb192a1c 100644 --- a/proto/col_uint8_gen_test.go +++ b/proto/col_uint8_gen_test.go @@ -38,7 +38,9 @@ func TestColUInt8_DecodeColumn(t *testing.T) { require.Equal(t, rows, dec.Rows()) dec.Reset() require.Equal(t, 0, dec.Rows()) + require.Equal(t, ColumnTypeUInt8, dec.Type()) + }) t.Run("ZeroRows", func(t *testing.T) { r := NewReader(bytes.NewReader(nil)) @@ -61,7 +63,6 @@ func TestColUInt8_DecodeColumn(t *testing.T) { v.EncodeColumn(nil) // should be no-op }) } - func TestColUInt8Array(t *testing.T) { const rows = 50 data := NewArrUInt8() diff --git a/proto/col_uint8_safe_gen.go b/proto/col_uint8_safe_gen.go index 02af53ae..ec5ff190 100644 --- a/proto/col_uint8_safe_gen.go +++ b/proto/col_uint8_safe_gen.go @@ -25,8 +25,9 @@ func (c *ColUInt8) DecodeColumn(r *Reader, rows int) error { // EncodeColumn encodes UInt8 rows to *Buffer. func (c ColUInt8) EncodeColumn(b *Buffer) { - if len(c) == 0 { + v := c + if len(v) == 0 { return } - b.Buf = append(b.Buf, c...) + b.Buf = append(b.Buf, v...) } diff --git a/proto/datetime64_test.go b/proto/datetime64_test.go index 6431bcdb..255c0e47 100644 --- a/proto/datetime64_test.go +++ b/proto/datetime64_test.go @@ -5,7 +5,6 @@ import ( "time" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) func TestDateTime64_Time(t *testing.T) { @@ -27,9 +26,3 @@ func TestDateTime64_Time(t *testing.T) { assert.Equal(t, time.Nanosecond, PrecisionNano.Duration(), "ns") }) } - -func TestColDateTime64_Wrap(t *testing.T) { - var data ColDateTime64 - w := data.Wrap(PrecisionMilli) - require.Equal(t, ColumnTypeDateTime64.With("3"), w.Type()) -} diff --git a/proto/profile_events.go b/proto/profile_events.go index 2dd808ab..734b7760 100644 --- a/proto/profile_events.go +++ b/proto/profile_events.go @@ -20,7 +20,7 @@ func (d *ProfileEvents) All() ([]ProfileEvent, error) { var out []ProfileEvent for i := range d.Type { e := ProfileEvent{ - Time: d.Time[i].Time(), + Time: d.Time.Row(i), Host: d.Host.Row(i), ThreadID: d.ThreadID[i], Type: ProfileEventType(d.Type[i]), diff --git a/proto/server_log.go b/proto/server_log.go index 2ed3d100..917c8a8f 100644 --- a/proto/server_log.go +++ b/proto/server_log.go @@ -42,7 +42,7 @@ func (s Logs) All() []Log { var out []Log for i := 0; i < s.Source.Rows(); i++ { out = append(out, Log{ - Time: s.Time[i].Time(), + Time: s.Time.Row(i), Host: s.HostName.Row(i), QueryID: s.QueryID.Row(i), ThreadID: s.ThreadID[i], diff --git a/query_test.go b/query_test.go index 02506010..13bae623 100644 --- a/query_test.go +++ b/query_test.go @@ -402,7 +402,7 @@ func TestClient_Query(t *testing.T) { loc, err := time.LoadLocation(tz) require.NoError(t, err) exp, err := time.ParseInLocation("2006-01-02 15:04:05", dt, loc) - v := data[0].Time().In(loc) + v := data.Row(0).In(loc) require.NoError(t, err) require.True(t, exp.Equal(v)) t.Logf("%s %d", v, v.Unix()) @@ -440,7 +440,7 @@ func TestClient_Query(t *testing.T) { } require.NoError(t, conn.Do(ctx, createTable), "create table") - data := proto.ColDateTime{1546290000} + data := proto.ColDateTime{Data: []proto.DateTime{1546290000}} insertQuery := Query{ Body: "INSERT INTO test_table VALUES", Input: []proto.InputColumn{ @@ -457,7 +457,7 @@ func TestClient_Query(t *testing.T) { }, } require.NoError(t, conn.Do(ctx, selectData), "select") - require.Len(t, data, 1) + require.Equal(t, 1, data.Rows()) 
require.Equal(t, data, gotData) }) t.Run("InsertDateTime64", func(t *testing.T) { @@ -471,12 +471,14 @@ func TestClient_Query(t *testing.T) { require.NoError(t, conn.Do(ctx, createTable), "create table") data := proto.ColDateTime64{ - proto.DateTime64(time.Unix(1546290000, 0).UnixNano()), + Data: []proto.DateTime64{ + proto.DateTime64(time.Unix(1546290000, 0).UnixNano()), + }, } insertQuery := Query{ Body: "INSERT INTO test_table VALUES", Input: []proto.InputColumn{ - {Name: "d", Data: data.Wrap(p)}, + {Name: "d", Data: data.WithPrecision(p)}, }, } require.NoError(t, conn.Do(ctx, insertQuery), "insert") @@ -490,22 +492,9 @@ func TestClient_Query(t *testing.T) { }, } require.NoError(t, conn.Do(ctx, selectData), "select") - require.Len(t, data, 1) + require.Equal(t, 1, data.Rows()) require.Equal(t, data, gotData) }) - t.Run("ReadAuto", func(t *testing.T) { - var gotData proto.ColDateTime64Auto - selectData := Query{ - Body: "SELECT * FROM test_table", - Result: proto.Results{ - {Name: "d", Data: &gotData}, - }, - } - require.NoError(t, conn.Do(ctx, selectData), "select") - require.Len(t, data, 1) - require.Equal(t, data, gotData.ColDateTime64) - require.Equal(t, proto.ColumnType("DateTime64(9)"), gotData.Type()) - }) }) t.Run("ArrayLowCardinality", func(t *testing.T) { t.Parallel()
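
For reference, a minimal usage sketch of the struct-based ColDateTime / ColDateTime64 API exercised by the test changes above, using only identifiers that appear in this patch (Data, Row, Rows, WithPrecision, PrecisionNano, InputColumn); the import path and the main wrapper are assumptions, not part of the change:

	package main

	import (
		"fmt"
		"time"

		"github.com/ClickHouse/ch-go/proto" // assumed module path; use this repository's proto package
	)

	func main() {
		// DateTime: the column is now a struct and rows live in Data
		// ([]proto.DateTime, seconds since epoch) instead of the column
		// itself being a slice.
		dt := proto.ColDateTime{
			Data: []proto.DateTime{1546290000},
		}
		fmt.Println(dt.Row(0)) // Row(i) returns time.Time and replaces dt[i].Time()

		// DateTime64: precision now travels with the column; WithPrecision
		// replaces the removed Wrap helper and the ColDateTime64Auto wrapper.
		dt64 := proto.ColDateTime64{
			Data: []proto.DateTime64{proto.DateTime64(time.Unix(1546290000, 0).UnixNano())},
		}
		input := proto.InputColumn{
			Name: "d",
			Data: dt64.WithPrecision(proto.PrecisionNano),
		}
		fmt.Println(input.Name, dt64.Rows())
	}
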