diff --git a/buf.gen.yaml b/buf.gen.yaml new file mode 100644 index 00000000..1fa91647 --- /dev/null +++ b/buf.gen.yaml @@ -0,0 +1,10 @@ +version: v2 +managed: + enabled: true + override: + - file_option: go_package_prefix + value: 'github.com/bufbuild/protocompile/internal/gen' +plugins: + - remote: buf.build/protocolbuffers/go + out: internal/gen + opt: paths=source_relative diff --git a/buf.yaml b/buf.yaml new file mode 100644 index 00000000..404243e4 --- /dev/null +++ b/buf.yaml @@ -0,0 +1,10 @@ +version: v2 +modules: + - path: proto + name: buf.build/bufbuild/pbc +lint: + use: + - STANDARD +breaking: + use: + - WIRE_JSON diff --git a/experimental/ast/builtin.go b/experimental/ast/builtin.go new file mode 100644 index 00000000..58fd05f8 --- /dev/null +++ b/experimental/ast/builtin.go @@ -0,0 +1,156 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import "fmt" + +const ( + BuiltinUnknown Builtin = iota + BuiltinInt32 + BuiltinInt64 + BuiltinUInt32 + BuiltinUInt64 + BuiltinSInt32 + BuiltinSInt64 + + BuiltinFloat + BuiltinDouble + + BuiltinFixed32 + BuiltinFixed64 + BuiltinSFixed32 + BuiltinSFixed64 + + BuiltinBool + BuiltinString + BuiltinBytes + + // This corresponds to the builtin "type" map. + BuiltinMap + + // This corresponds to the builtin "constant" max, used in range expressions. + BuiltinMax + + builtinCount + + BuiltinFloat32 = BuiltinFloat + BuiltinFloat64 = BuiltinDouble +) + +var ( + builtinByName = map[string]Builtin{ + "int32": BuiltinInt32, + "int64": BuiltinInt64, + "uint32": BuiltinUInt32, + "uint64": BuiltinUInt64, + "sint32": BuiltinSInt32, + "sint64": BuiltinSInt64, + + "float": BuiltinFloat, + "double": BuiltinDouble, + + "fixed32": BuiltinFixed32, + "fixed64": BuiltinFixed64, + "sfixed32": BuiltinSFixed32, + "sfixed64": BuiltinSFixed64, + + "bool": BuiltinBool, + "string": BuiltinString, + "bytes": BuiltinBytes, + + "map": BuiltinMap, + "max": BuiltinMax, + } + + builtinNames = func() []string { + names := make([]string, builtinCount) + names[0] = "unknown" + + for name, idx := range builtinByName { + names[idx] = name + } + return names + }() + + // This is not used for syntactic analysis, since all keywords are contextual. + // This is only used for affecting how diagnostics are rendered. + // + // This is also not a complete list of keywords, only those we want to call out + // as special in Token.describe. + keywords = map[string]bool{ + "syntax": true, + "edition": true, + "import": true, + "weak": true, + "public": true, + "package": true, + + "option": true, + "message": true, + "enum": true, + "service": true, + "extend": true, + "oneof": true, + + "extensions": true, + "reserved": true, + "to": true, + + "rpc": true, + "returns": true, + + "repeated": true, + "optional": true, + "required": true, + "group": true, + "stream": true, + } +) + +// Builtin is one of the built-in Protobuf types. +type Builtin int8 + +// BuiltinByName looks up a builtin type by name. 
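// Editorial example (not part of this change), showing how the lookup driven
// by the table above behaves; it assumes the package is imported as "ast" and
// that "fmt" is available:
//
//	b := ast.BuiltinByName("sfixed32")
//	fmt.Println(b, b.IsPrimitive())                         // "sfixed32 true"
//	fmt.Println(ast.BuiltinByName("map") == ast.BuiltinMap) // "true"
//	fmt.Println(ast.BuiltinByName("no_such_type"))          // "unknown"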
+// +// If name does not name a builtin, returns [BuiltinUnknown]. +func BuiltinByName(name string) Builtin { + // The zero value is BuiltinUnknown. + return builtinByName[name] +} + +// String implements [strings.Stringer] for Builtin. +func (b Builtin) String() string { + if int(b) < len(builtinNames) { + return builtinNames[int(b)] + } + return fmt.Sprintf("builtin%d", int(b)) +} + +// IsPrimitive returns if this builtin name refers to one of the primitive types. +func (b Builtin) IsPrimitive() bool { + switch b { + case BuiltinInt32, BuiltinInt64, + BuiltinUInt32, BuiltinUInt64, + BuiltinSInt32, BuiltinSInt64, + BuiltinFloat, BuiltinDouble, + BuiltinFixed32, BuiltinFixed64, + BuiltinSFixed32, BuiltinSFixed64, + BuiltinBool, + BuiltinString, BuiltinBytes: + return true + default: + return false + } +} diff --git a/experimental/ast/context.go b/experimental/ast/context.go new file mode 100644 index 00000000..ce86db0b --- /dev/null +++ b/experimental/ast/context.go @@ -0,0 +1,178 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + + "github.com/bufbuild/protocompile/experimental/report" + "github.com/bufbuild/protocompile/internal/arena" +) + +// Context is where all of the book-keeping for the AST of a particular file is kept. +// +// Virtually all operations inside of package ast2 involve a Context. However, most of +// the exported types carry their Context with them, so you don't need to worry about +// passing it around. +type Context struct { + file *report.IndexedFile + + // Storage for tokens. + stream []tokenImpl + syntheticTokens []tokenSynthetic + + // This contains materialized literals for some tokens. + // + // Not all tokens will have an entry here; only those that have "unusual" + // representations. This means the lexer can deal with the complex parsing + // logic on our behalf in general, but common cases are re-parsed on-demand. + // + // All elements of this map are string, uint64, or float64. + literals map[rawToken]any + + // Storage for the various node types. + decls decls + types types + exprs exprs + + options arena.Arena[rawCompactOptions] +} + +// Contextual is any AST type that carries a context (virtually all of them). +type Contextual interface { + // Context returns this types's [Context]. + // + // Zero values of this type should return nil. + Context() *Context +} + +// newContext creates a fresh context for a particular file. +func newContext(file report.File) *Context { + c := &Context{file: report.NewIndexedFile(file), literals: map[rawToken]any{}} + c.NewDeclBody(Token{}) // This is the rawBody for the whole file. + return c +} + +// Parse parses a Protobuf file, and places any diagnostics encountered in report. 
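// A rough usage sketch (editorial, not part of this change). It assumes that
// report.File is a plain struct with Path and Text fields, as suggested by
// Context.Path and Context.Text below, that a zero report.Report is ready to
// use, and that "source" holds the file contents; none of that is guaranteed
// by this diff alone:
//
//	errs := new(report.Report)
//	file := ast.Parse(report.File{Path: "example.proto", Text: source}, errs)
//	if syn := file.Syntax(); !syn.Nil() {
//		fmt.Println("declared with", syn.Keyword().Text())
//	}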
+func Parse(file report.File, report *report.Report) File { + lexer := lexer{Context: newContext(file)} + + report.Stage++ + lexer.Lex(report) + + report.Stage++ + parse(report, lexer.Context) + + report.Stage++ + legalize(report, nil, lexer.Context.Root()) + + return lexer.Context.Root() +} + +// Context implements [Contextual] for Context. +func (c *Context) Context() *Context { + return c +} + +// Stream returns a cursor over the whole lexed token stream. +func (c *Context) Stream() *Cursor { + return &Cursor{ + withContext: withContext{c}, + start: 1, + end: rawToken(len(c.stream) + 1), + } +} + +// Path returns the (alleged) file system path for this file. +// +// This path is not used for anything except for diagnostics. +func (c *Context) Path() string { + return c.file.File().Path +} + +// Returns the full text of the file. +func (c *Context) Text() string { + return c.file.File().Text +} + +// Root returns the root AST node for this context. +func (c *Context) Root() File { + // NewContext() sticks the root at the beginning of bodies for us. + return File{wrapDecl[DeclScope](1, c)} +} + +// Tokens returns a flat slice over all of the non-synthetic tokens in this context, +// with no respect to nesting. +// +// You should probably use [Context.Stream] instead of this. +func (c *Context) Tokens() Slice[Token] { + return funcSlice[tokenImpl, Token]{ + s: c.stream, + f: func(i int, _ *tokenImpl) Token { return rawToken(i + 1).With(c) }, + } +} + +// NOTE: Some methods of Context live in the context_*.go files. This is to +// reduce clutter in this file. + +// panicIfNil panics if this context is nil. +// +// This is helpful for immediately panicking on function entry. +func (c *Context) panicIfNil() { + _ = c.file +} + +// ours checks that a contextual value is owned by this context, and panics if not. +// +// Does not panic if that is nil or has a nil context. Panics if c is nil. +func (c *Context) panicIfNotOurs(that ...Contextual) { + c.panicIfNil() + for _, that := range that { + if that == nil { + continue + } + + c2 := that.Context() + if c2 == nil || c2 == c { + continue + } + panic(fmt.Sprintf("protocompile/ast: attempt to mix different contexts: %p(%q) and %p(%q)", c, c.Path(), c2, c2.Path())) + } +} + +// withContext is an embedable type that provides common operations involving +// a context, causing it to implement Contextual. +type withContext struct { + ctx *Context +} + +// Context returns this type's associated [ast.Context]. +// +// Returns `nil` if this is this type's zero value. +func (c withContext) Context() *Context { + return c.ctx +} + +// Nil checks whether this is this type's zero value. +func (c withContext) Nil() bool { + return c.ctx == nil +} + +// panicIfNil panics if this context is nil. +// +// This is helpful for immediately panicking on function entry. +func (c *withContext) panicIfNil() { + c.Context().panicIfNil() +} diff --git a/experimental/ast/context_nodes.go b/experimental/ast/context_nodes.go new file mode 100644 index 00000000..6258cdbb --- /dev/null +++ b/experimental/ast/context_nodes.go @@ -0,0 +1,243 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import "github.com/bufbuild/protocompile/internal/arena" + +// NewDeclEmpty creates a new DeclEmpty node. +func (c *Context) NewDeclEmpty(semicolon Token) DeclEmpty { + c.panicIfNotOurs(semicolon) + + ptr := c.decls.empties.New(rawDeclEmpty{semi: semicolon.raw}) + return wrapDecl[DeclEmpty](arena.Untyped(ptr), c) +} + +// NewDeclSyntax creates a new DeclPragma node. +func (c *Context) NewDeclSyntax(args DeclSyntaxArgs) DeclSyntax { + c.panicIfNotOurs(args.Keyword, args.Equals, args.Value, args.Options, args.Semicolon) + + ptr := c.decls.syntaxes.New(rawDeclSyntax{ + keyword: args.Keyword.raw, + equals: args.Equals.raw, + options: args.Options.ptr, + semi: args.Semicolon.raw, + }) + + decl := wrapDecl[DeclSyntax](arena.Untyped(ptr), c) + decl.SetValue(args.Value) + + return decl +} + +// NewDeclPackage creates a new DeclPackage node. +func (c *Context) NewDeclPackage(args DeclPackageArgs) DeclPackage { + c.panicIfNotOurs(args.Keyword, args.Path, args.Options, args.Semicolon) + + ptr := c.decls.packages.New(rawDeclPackage{ + keyword: args.Keyword.raw, + path: args.Path.raw, + options: args.Options.ptr, + semi: args.Semicolon.raw, + }) + return wrapDecl[DeclPackage](arena.Untyped(ptr), c) +} + +// NewDeclImport creates a new DeclImport node. +func (c *Context) NewDeclImport(args DeclImportArgs) DeclImport { + c.panicIfNotOurs(args.Keyword, args.Modifier, args.ImportPath, args.Options, args.Semicolon) + + ptr := c.decls.imports.New(rawDeclImport{ + keyword: args.Keyword.raw, + modifier: args.Modifier.raw, + options: args.Options.ptr, + semi: args.Semicolon.raw, + }) + decl := wrapDecl[DeclImport](arena.Untyped(ptr), c) + decl.SetImportPath(args.ImportPath) + return decl +} + +// NewDeclDef creates a new DeclDef node. +func (c *Context) NewDeclDef(args DeclDefArgs) DeclDef { + c.panicIfNotOurs( + args.Keyword, args.Type, args.Name, args.Returns, + args.Equals, args.Value, args.Options, args.Body, args.Semicolon) + + ptr := c.decls.defs.New(rawDeclDef{ + name: args.Name.raw, + equals: args.Equals.raw, + options: args.Options.ptr, + semi: args.Semicolon.raw, + }) + decl := wrapDecl[DeclDef](arena.Untyped(ptr), c) + + if args.Type != nil { + decl.SetType(args.Type) + } else { + decl.SetType(TypePath{Path: rawPath{args.Keyword.raw, args.Keyword.raw}.With(c)}) + } + + if !args.Returns.Nil() { + decl.raw.signature = &rawSignature{ + returns: args.Returns.raw, + } + } + + decl.SetValue(args.Value) + decl.SetBody(args.Body) + + return decl +} + +// NewDeclBody creates a new DeclBody node +func (c *Context) NewDeclBody(braces Token) DeclScope { + c.panicIfNotOurs(braces) + + ptr := c.decls.bodies.New(rawDeclScope{braces: braces.raw}) + return wrapDecl[DeclScope](arena.Untyped(ptr), c) +} + +// NewDeclRange creates a new DeclRange node. 
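// An editorial sketch of using this constructor with synthetic tokens, given
// some existing "file" obtained from Parse (the variable names are
// illustrative only):
//
//	ctx := file.Context()
//	rng := ctx.NewDeclRange(ast.DeclRangeArgs{
//		Keyword:   ctx.NewIdent("reserved"),
//		Semicolon: ctx.NewPunct(";"),
//	})
//	// The range's expressions are attached afterwards, via rng.Append or
//	// rng.AppendComma (see DeclRange in decl_range.go).
//	file.Append(rng)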
+func (c *Context) NewDeclRange(args DeclRangeArgs) DeclRange { + c.panicIfNotOurs(args.Keyword, args.Options, args.Semicolon) + + ptr := c.decls.ranges.New(rawDeclRange{ + keyword: args.Keyword.raw, + options: args.Options.ptr, + semi: args.Semicolon.raw, + }) + decl := wrapDecl[DeclRange](arena.Untyped(ptr), c) + + return decl +} + +// NewExprPrefixed creates a new ExprPrefixed node. +func (c *Context) NewExprPrefixed(args ExprPrefixedArgs) ExprPrefixed { + c.panicIfNotOurs(args.Prefix, args.Expr) + + ptr := c.exprs.prefixes.New(rawExprPrefixed{ + prefix: args.Prefix.raw, + }) + expr := ExprPrefixed{ + withContext: withContext{c}, + ptr: arena.Untyped(ptr), + raw: ptr.In(&c.exprs.prefixes), + } + expr.SetExpr(args.Expr) + return expr +} + +// NewExprRange creates a new ExprRange node. +func (c *Context) NewExprRange(args ExprRangeArgs) ExprRange { + c.panicIfNotOurs(args.Start, args.To, args.End) + + ptr := c.exprs.ranges.New(rawExprRange{ + to: args.To.raw, + }) + expr := ExprRange{ + withContext: withContext{c}, + ptr: arena.Untyped(ptr), + raw: ptr.In(&c.exprs.ranges), + } + expr.SetBounds(args.Start, args.End) + return expr +} + +// NewExprArray creates a new ExprArray node. +func (c *Context) NewExprArray(brackets Token) ExprArray { + c.panicIfNotOurs(brackets) + + ptr := c.exprs.arrays.New(rawExprArray{ + brackets: brackets.raw, + }) + return ExprArray{ + withContext: withContext{c}, + ptr: arena.Untyped(ptr), + raw: ptr.In(&c.exprs.arrays), + } +} + +// NewExprDict creates a new ExprDict node. +func (c *Context) NewExprDict(braces Token) ExprDict { + c.panicIfNotOurs(braces) + + ptr := c.exprs.dicts.New(rawExprDict{ + braces: braces.raw, + }) + return ExprDict{ + withContext: withContext{c}, + ptr: arena.Untyped(ptr), + raw: ptr.In(&c.exprs.dicts), + } +} + +// NewExprPrefixed creates a new ExprPrefixed node. +func (c *Context) NewExprKV(args ExprKVArgs) ExprKV { + c.panicIfNotOurs(args.Key, args.Colon, args.Value) + + ptr := c.exprs.fields.New(rawExprKV{ + colon: args.Colon.raw, + }) + expr := ExprKV{ + withContext: withContext{c}, + ptr: arena.Untyped(ptr), + raw: ptr.In(&c.exprs.fields), + } + expr.SetKey(args.Key) + expr.SetValue(args.Value) + return expr +} + +// NewTypePrefixed creates a new TypeModified node. +func (c *Context) NewTypePrefixed(args TypePrefixedArgs) TypePrefixed { + c.panicIfNotOurs(args.Prefix, args.Type) + + ptr := c.types.modifieds.New(rawPrefixed{ + prefix: args.Prefix.raw, + }) + ty := TypePrefixed{ + withContext: withContext{c}, + ptr: arena.Untyped(ptr), + raw: ptr.In(&c.types.modifieds), + } + ty.SetType(args.Type) + + return ty +} + +// NewTypeGeneric creates a new TypeGeneric node. +func (c *Context) NewTypeGeneric(args TypeGenericArgs) TypeGeneric { + c.panicIfNotOurs(args.Path, args.AngleBrackets) + + ptr := c.types.generics.New(rawGeneric{ + path: args.Path.raw, + args: rawTypeList{brackets: args.AngleBrackets.raw}, + }) + + return TypeGeneric{ + withContext: withContext{c}, + ptr: arena.Untyped(ptr), + raw: ptr.In(&c.types.generics), + } +} + +// NewOptions creates a new Options node. +func (c *Context) NewOptions(brackets Token) CompactOptions { + c.panicIfNotOurs(brackets) + ptr := c.options.New(rawCompactOptions{ + brackets: brackets.raw, + }) + return newOptions(ptr, c) +} diff --git a/experimental/ast/context_tokens.go b/experimental/ast/context_tokens.go new file mode 100644 index 00000000..b6348eca --- /dev/null +++ b/experimental/ast/context_tokens.go @@ -0,0 +1,153 @@ +// Copyright 2020-2024 Buf Technologies, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + "math" +) + +// NewSpan creates a new span in this context. +// +// Panics if either endpoint is out of bounds or if start > end. +func (c *Context) NewSpan(start, end int) Span { + c.panicIfNil() + + if start > end { + panic(fmt.Sprintf("protocompile/ast: called NewSpan() with %d > %d", start, end)) + } + if end > len(c.Text()) { + panic(fmt.Sprintf("protocompile/ast: NewSpan() argument out of bounds: %d > %d", end, len(c.Text()))) + } + + return Span{withContext{c}, start, end} +} + +// PushToken mints the next token referring to a piece of the input source. +func (c *Context) PushToken(length int, kind TokenKind) Token { + c.panicIfNil() + + if length < 0 || length > math.MaxInt32 { + panic(fmt.Sprintf("protocompile/ast: PushToken() called with invalid length: %d", length)) + } + + var prevEnd int + if len(c.stream) != 0 { + prevEnd = int(c.stream[len(c.stream)-1].end) + } + + end := prevEnd + length + if end > len(c.Text()) { + panic(fmt.Sprintf("protocompile/ast: PushToken() overflowed backing text: %d > %d", end, len(c.Text()))) + } + + c.stream = append(c.stream, tokenImpl{ + end: uint32(prevEnd + length), + kindAndOffset: int32(kind) & tokenKindMask, + }) + + return Token{withContext{c}, rawToken(len(c.stream))} +} + +// FuseTokens marks a pair of tokens as their respective open and close. +// +// If open or close are synthethic or not currently a leaf, this function panics. +func (c *Context) FuseTokens(open, close Token) { + c.panicIfNil() + + impl1 := open.impl() + if impl1 == nil { + panic("protocompile/ast: called FuseTokens() with a synthetic open token") + } + if !impl1.IsLeaf() { + panic("protocompile/ast: called FuseTokens() with non-leaf as the open token") + } + + impl2 := close.impl() + if impl2 == nil { + panic("protocompile/ast: called FuseTokens() with a synthetic open token") + } + if !impl2.IsLeaf() { + panic("protocompile/ast: called FuseTokens() with non-leaf as the open token") + } + + diff := int32(close.raw - open.raw) + if diff <= 0 { + panic("protocompile/ast: called FuseTokens() with out-of-order") + } + + impl1.kindAndOffset |= diff << tokenOffsetShift + impl2.kindAndOffset |= -diff << tokenOffsetShift +} + +// NewIdent mints a new synthetic identifier token with the given name. +func (c *Context) NewIdent(name string) Token { + c.panicIfNil() + + return c.newSynth(tokenSynthetic{ + text: name, + kind: TokenIdent, + }) +} + +// NewIdent mints a new synthetic punctuation token with the given text. +func (c *Context) NewPunct(text string) Token { + c.panicIfNil() + + return c.newSynth(tokenSynthetic{ + text: text, + kind: TokenPunct, + }) +} + +// NewString mints a new synthetic string containing the given text. +func (c *Context) NewString(text string) Token { + c.panicIfNil() + + return c.newSynth(tokenSynthetic{ + text: text, + kind: TokenString, + }) +} + +// NewOpenClose mints a new synthetic open/close pair using the given tokens. 
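// A short editorial sketch of minting a synthetic token tree with the three
// constructors above and fusing it here ("ctx" is some *Context):
//
//	open := ctx.NewPunct("(")
//	closed := ctx.NewPunct(")")
//	arg := ctx.NewIdent("int32")
//	ctx.NewOpenClose(open, closed, arg)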
+// +// Panics if either open or close is non-synthetic or non-leaf. +func (c *Context) NewOpenClose(open, close Token, children ...Token) { + c.panicIfNil() + + if !open.IsSynthetic() || !close.IsSynthetic() { + panic("protocompile/ast: called NewOpenClose() with non-synthetic delimiters") + } + if !open.IsLeaf() || !close.IsLeaf() { + panic("protocompile/ast: called PushCloseToken() with non-leaf as a delimiter token") + } + + synth := open.synthetic() + synth.otherEnd = close.raw + synth.children = make([]rawToken, len(children)) + for i, t := range children { + synth.children[i] = t.raw + } + close.synthetic().otherEnd = open.raw +} + +func (c *Context) newSynth(tok tokenSynthetic) Token { + c.panicIfNil() + + raw := rawToken(^len(c.syntheticTokens)) + c.syntheticTokens = append(c.syntheticTokens, tok) + return raw.With(c) +} diff --git a/experimental/ast/decl.go b/experimental/ast/decl.go new file mode 100644 index 00000000..7a856e53 --- /dev/null +++ b/experimental/ast/decl.go @@ -0,0 +1,121 @@ +package ast + +import ( + "fmt" + + "github.com/bufbuild/protocompile/internal/arena" +) + +const ( + declEmpty declKind = iota + 1 + declPragma + declPackage + declImport + declDef + declBody + declRange +) + +// Decl is a Protobuf declaration. +// +// This is implemented by types in this package of the form Decl*. +type Decl interface { + Spanner + + // with should be called on a nil value of this type (not + // a nil interface) and return the corresponding value of this type + // extracted from the given context and index. + // + // Not to be called directly; see rawDecl[T].With(). + with(ctx *Context, ptr arena.Untyped) Decl + + // kind returns what kind of decl this is. + declKind() declKind + // declIndex returns the untyped arena pointer for this declaration. + declIndex() arena.Untyped +} + +// decls is storage for every kind of Decl in a Context. +type decls struct { + empties arena.Arena[rawDeclEmpty] + syntaxes arena.Arena[rawDeclSyntax] + packages arena.Arena[rawDeclPackage] + imports arena.Arena[rawDeclImport] + defs arena.Arena[rawDeclDef] + bodies arena.Arena[rawDeclScope] + ranges arena.Arena[rawDeclRange] +} + +func (DeclEmpty) declKind() declKind { return declEmpty } +func (DeclSyntax) declKind() declKind { return declPragma } +func (DeclPackage) declKind() declKind { return declPackage } +func (DeclImport) declKind() declKind { return declImport } +func (DeclDef) declKind() declKind { return declDef } +func (DeclScope) declKind() declKind { return declBody } +func (DeclRange) declKind() declKind { return declRange } + +// DeclEmpty is an empty declaration, a lone ;. +type DeclEmpty struct { + withContext + ptr arena.Untyped + raw *rawDeclEmpty +} + +type rawDeclEmpty struct { + semi rawToken +} + +// Semicolon returns this field's ending semicolon. +// +// May be nil, if not present. +func (e DeclEmpty) Semicolon() Token { + return e.raw.semi.With(e) +} + +// Span implements [Spanner] for Service. +func (e DeclEmpty) Span() Span { + return e.Semicolon().Span() +} + +func (DeclEmpty) with(ctx *Context, ptr arena.Untyped) Decl { + return DeclEmpty{withContext{ctx}, ptr, ctx.decls.empties.At(ptr)} +} + +func (e DeclEmpty) declIndex() arena.Untyped { + return e.ptr +} + +// Wrap wraps this declID with a context to present to the user. 
+func wrapDecl[T Decl](p arena.Untyped, c Contextual) T { + ctx := c.Context() + var decl T + if p.Nil() { + return decl + } + return decl.with(ctx, p).(T) +} + +type declKind int8 + +// reify returns the corresponding nil Decl for the given kind, +// such that k.reify().kind() == k. +func (k declKind) reify() Decl { + switch k { + case declEmpty: + return DeclEmpty{} + case declPragma: + return DeclSyntax{} + case declPackage: + return DeclPackage{} + case declImport: + return DeclImport{} + case declDef: + return DeclDef{} + case declBody: + return DeclScope{} + case declRange: + return DeclRange{} + default: + panic(fmt.Sprintf("protocompile/ast: unknown declKind %d: this is a bug in protocompile", k)) + } +} diff --git a/experimental/ast/decl_def.go b/experimental/ast/decl_def.go new file mode 100644 index 00000000..b3ba8d11 --- /dev/null +++ b/experimental/ast/decl_def.go @@ -0,0 +1,542 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import "github.com/bufbuild/protocompile/internal/arena" + +// DeclDef is a general Protobuf definition. +// +// This [Decl] represents the union of several similar AST nodes, to aid in permissive +// parsing and precise diagnostics. +// +// This node represents messages, enums, services, extend blocks, fields, enum values, +// oneofs, groups, service methods, and options. It also permits nonsensical syntax, such as a +// message with a tag number. +// +// Generally, you should not need to work with DeclDef directly; instead, use the As* methods +// to access the correct concrete syntax production a DeclDef represents. +type DeclDef struct { + withContext + + ptr arena.Untyped + raw *rawDeclDef +} + +type rawDeclDef struct { + ty rawType // Not present for enum fields. + name rawPath + + signature *rawSignature + + equals rawToken + value rawExpr + + options arena.Pointer[rawCompactOptions] + body arena.Pointer[rawDeclScope] + semi rawToken +} + +// DeclDefArgs is arguments for creating a [DeclDef] with [Context.NewDeclDef]. +type DeclDefArgs struct { + // If both Keyword and Type are set, Type will be prioritized. + Keyword Token + Type Type + Name Path + + // NOTE: the values for the type signature are not provided at + // construction time, and should be added by mutating through + // DeclDef.Signature. + Returns Token + + Equals Token + Value Expr + + Options CompactOptions + + Body DeclScope + Semicolon Token +} + +// Type returns the "prefix" type of this definition. +// +// This type may coexist with a [Signature] in this definition. +// +// May be nil, such as for enum values. For messages and other productions +// introduced by a special keyword, this will be a [TypePath] whose single +// identifier is that keyword. +// +// See [DeclDef.Keyword]. +func (d DeclDef) Type() Type { + return d.raw.ty.With(d) +} + +// SetType sets the "prefix" type of this definition. 
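// An editorial sketch of inspecting the prefix type of some DeclDef "def"
// taken from a parsed file:
//
//	switch ty := def.Type().(type) {
//	case ast.TypePath:
//		// For `message Foo { ... }` this is the `message` keyword itself,
//		// which is also what def.Keyword() returns.
//		fmt.Println("prefix type:", ty.Path.AsIdent().Text())
//	case nil:
//		fmt.Println("no prefix type (for example, an enum value)")
//	}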
+func (d DeclDef) SetType(ty Type) { + d.raw.ty = toRawType(ty) +} + +// Keyword returns the introducing keyword for this definition, if +// there is one. +// +// See [DeclDef.Type] for details on where this keyword comes from. +func (d DeclDef) Keyword() Token { + path, ok := d.Type().(TypePath) + if !ok { + return Token{} + } + ident := path.Path.AsIdent() + switch ident.Text() { + case "message", "enum", "service", "extend", "oneof", "group", "rpc", "option": + return ident + default: + return Token{} + } +} + +// Name returns this definition's declared name. +func (d DeclDef) Name() Path { + return d.raw.name.With(d) +} + +// Signature returns this definition's type signature, if it has one. +// +// Note that this is distinct from the type returned by [DeclDef.Type], which +// is the "prefix" type for the definition (such as for a field). This is a +// signature for e.g. a method. +// +// Not all defs have a signature, so this function may return a nil Signature. +// If you want to add one, use [DeclDef.WithSignature]. +func (d DeclDef) Signature() Signature { + if d.raw.signature == nil { + return Signature{} + } + + return Signature{ + d.withContext, + d.raw.signature, + } +} + +// WithSignature is like Signature, but it adds an empty signature if it would +// return nil. +func (d DeclDef) WithSignature() Signature { + if d.Signature().Nil() { + d.raw.signature = new(rawSignature) + } + return d.Signature() +} + +// Equals returns this definitions = token, before the value. +// May be nil. +func (d DeclDef) Equals() Token { + return d.raw.equals.With(d) +} + +// Value returns this definition's value. For a field, this will be the +// tag number, while for an option, this will be the complex expression +// representing its value. +func (d DeclDef) Value() Expr { + return d.raw.value.With(d) +} + +// SetValue sets the value of this definition. +// +// See [DeclDef.Value]. +func (d DeclDef) SetValue(expr Expr) { + d.raw.value = toRawExpr(expr) +} + +// Options returns the compact options list for this definition. +func (d DeclDef) Options() CompactOptions { + return newOptions(d.raw.options, d) +} + +// SetOptions sets the compact options list for this definition. +// +// Setting it to a nil Options clears it. +func (d DeclDef) SetOptions(opts CompactOptions) { + d.raw.options = opts.ptr +} + +// Body returns this definition's body, if it has one. +func (d DeclDef) Body() DeclScope { + return wrapDecl[DeclScope](arena.Untyped(d.raw.body), d) +} + +// SetBody sets the body for this definition. +func (d DeclDef) SetBody(b DeclScope) { + d.raw.body = arena.Pointer[rawDeclScope](b.ptr) +} + +// Semicolon returns the ending semicolon token for this definition. +// May be nil. +func (d DeclDef) Semicolon() Token { + return d.raw.semi.With(d) +} + +// Classify looks at all the fields in this definition and decides what kind of +// definition it's supposed to represent. +// +// For nonsensical definitions, this returns nil, although it is not guaranteed +// to return nil for *all* nonsensical definitions. 
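// An editorial sketch of the intended calling pattern, for some DeclDef "def":
//
//	switch d := def.Classify().(type) {
//	case ast.DefMessage:
//		fmt.Println("message", d.Name.Text())
//	case ast.DefField:
//		fmt.Println("field", d.Name.Text(), "tagged", d.Tag)
//	case nil:
//		// Nonsensical definition; fall back to inspecting def directly.
//	}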
+func (d DeclDef) Classify() Def { + kw := d.Keyword() + nameID := d.Name().AsIdent() + + eq := d.Equals() + value := d.Value() + noValue := eq.Nil() && value == nil + + switch text := kw.Text(); text { + case "message", "enum", "service", "extend", "oneof": + if (!nameID.Nil() || text == "extend") && noValue && + d.Signature().Nil() && d.Options().Nil() && !d.Body().Nil() { + + switch text { + case "message": + return DefMessage{ + Keyword: kw, + Name: nameID, + Body: d.Body(), + Decl: d, + } + case "enum": + return DefEnum{ + Keyword: kw, + Name: nameID, + Body: d.Body(), + Decl: d, + } + case "service": + return DefService{ + Keyword: kw, + Name: nameID, + Body: d.Body(), + Decl: d, + } + case "oneof": + return DefOneof{ + Keyword: kw, + Name: nameID, + Body: d.Body(), + Decl: d, + } + case "extend": + return DefExtend{ + Keyword: kw, + Extendee: d.Name(), + Body: d.Body(), + Decl: d, + } + } + } + case "group": + if !nameID.Nil() && d.Signature().Nil() && value != nil { + return DefGroup{ + Keyword: kw, + Name: nameID, + Equals: eq, + Tag: value, + Options: d.Options(), + Body: d.Body(), + Decl: d, + } + } + case "option": + if value != nil && d.Signature().Nil() && d.Options().Nil() && d.Body().Nil() { + return DefOption{ + Keyword: kw, + Option: Option{ + Path: d.Name(), + Equals: eq, + Value: value, + }, + Semicolon: d.Semicolon(), + Decl: d, + } + } + case "rpc": + if !nameID.Nil() && noValue && !d.Signature().Nil() && d.Options().Nil() { + return DefMethod{ + Keyword: kw, + Name: nameID, + Signature: d.Signature(), + Body: d.Body(), + Decl: d, + } + } + } + + // At this point, having complex path, a signature or a body is invalid. + if nameID.Nil() || !d.Signature().Nil() || !d.Body().Nil() { + return nil + } + + if d.Type() == nil { + return DefEnumValue{ + Name: nameID, + Equals: eq, + Tag: value, + Options: d.Options(), + Semicolon: d.Semicolon(), + Decl: d, + } + } + + return DefField{ + Type: d.Type(), + Name: nameID, + Equals: eq, + Tag: value, + Options: d.Options(), + Semicolon: d.Semicolon(), + Decl: d, + } +} + +// Span implements [Spanner] for DeclDef. +func (d DeclDef) Span() Span { + if d.Nil() { + return Span{} + } + return JoinSpans( + d.Type(), + d.Name(), + d.Signature(), + d.Equals(), + d.Value(), + d.Options(), + d.Body(), + d.Semicolon(), + ) +} + +func (DeclDef) with(ctx *Context, ptr arena.Untyped) Decl { + return DeclDef{withContext{ctx}, ptr, ctx.decls.defs.At(ptr)} +} + +func (d DeclDef) declIndex() arena.Untyped { + return d.ptr +} + +// Signature is a type signature of the form (types) returns (types). +// +// Signatures may have multiple inputs and outputs. +type Signature struct { + withContext + + raw *rawSignature +} + +type rawSignature struct { + input, output rawTypeList + returns rawToken +} + +// Returns returns (lol) the "returns" token that separates the input and output +// type lists. +func (s Signature) Returns() Token { + return s.raw.returns.With(s) +} + +// Inputs returns the input argument list for this signature. +func (s Signature) Inputs() TypeList { + return TypeList{ + s.withContext, + &s.raw.input, + } +} + +// Outputs returns the output argument list for this signature. +func (s Signature) Outputs() TypeList { + return TypeList{ + s.withContext, + &s.raw.output, + } +} + +// Span implemented [Spanner] for Signature. +func (s Signature) Span() Span { + return JoinSpans(s.Inputs(), s.Returns(), s.Outputs()) +} + +// Def is the return type of [DeclDef.Classify]. 
+// +// This interface is implemented by all the Def* types in this package, and +// can be type-asserted to any of them, usually in a type switch. +// +// A [DeclDef] can't be mutated through a Def; instead, you will need to mutate +// the general structure instead. +type Def interface { + Spanner + + isDef() +} + +// DefMessage is a [DeclDef] projected into a message definition. +// +// See [DeclDef.Classify]. +type DefMessage struct { + Keyword Token + Name Token + Body DeclScope + + Decl DeclDef +} + +// DefEnum is a [DeclDef] projected into an enum definition. +// +// See [DeclDef.Classify]. +type DefEnum struct { + Keyword Token + Name Token + Body DeclScope + + Decl DeclDef +} + +// DefService is a [DeclDef] projected into a service definition. +// +// See [DeclDef.Classify] +type DefService struct { + Keyword Token + Name Token + Body DeclScope + + Decl DeclDef +} + +// DefExtend is a [DeclDef] projected into an extension definition. +// +// See [DeclDef.Classify]. +type DefExtend struct { + Keyword Token + Extendee Path + Body DeclScope + + Decl DeclDef +} + +// DefField is a [DeclDef] projected into a field definition. +// +// See [DeclDef.Classify]. +type DefField struct { + Type Type + Name Token + Equals Token + Tag Expr + Options CompactOptions + Semicolon Token + + Decl DeclDef +} + +// DefEnumValue is a [DeclDef] projected into an enum value definition. +// +// See [DeclDef.Classify]. +type DefEnumValue struct { + Name Token + Equals Token + Tag Expr + Options CompactOptions + Semicolon Token + + Decl DeclDef +} + +// DefEnumValue is a [DeclDef] projected into a oneof definition. +// +// See [DeclDef.Classify]. +type DefOneof struct { + Keyword Token + Name Token + Body DeclScope + + Decl DeclDef +} + +// DefGroup is a [DeclDef] projected into a group definition. +// +// See [DeclDef.Classify]. +type DefGroup struct { + Keyword Token + Name Token + Equals Token + Tag Expr + Options CompactOptions + Body DeclScope + + Decl DeclDef +} + +// DefMethod is a [DeclDef] projected into a method definition. +// +// See [DeclDef.Classify]. +type DefMethod struct { + Keyword Token + Name Token + Signature Signature + Body DeclScope + + Decl DeclDef +} + +// DefOption is a [DeclDef] projected into a method definition. +// +// Yes, an option is technically not defining anything, just setting a value. +// However, it's syntactically analogous to a definition! +// +// See [DeclDef.Classify]. 
+type DefOption struct { + Option + + Keyword Token + Semicolon Token + + Decl DeclDef +} + +func (DefMessage) isDef() {} +func (DefEnum) isDef() {} +func (DefService) isDef() {} +func (DefExtend) isDef() {} +func (DefField) isDef() {} +func (DefEnumValue) isDef() {} +func (DefOneof) isDef() {} +func (DefGroup) isDef() {} +func (DefMethod) isDef() {} +func (DefOption) isDef() {} + +func (d DefMessage) Span() Span { return d.Decl.Span() } +func (d DefEnum) Span() Span { return d.Decl.Span() } +func (d DefService) Span() Span { return d.Decl.Span() } +func (d DefExtend) Span() Span { return d.Decl.Span() } +func (d DefField) Span() Span { return d.Decl.Span() } +func (d DefEnumValue) Span() Span { return d.Decl.Span() } +func (d DefOneof) Span() Span { return d.Decl.Span() } +func (d DefGroup) Span() Span { return d.Decl.Span() } +func (d DefMethod) Span() Span { return d.Decl.Span() } +func (d DefOption) Span() Span { return d.Decl.Span() } + +func (d DefMessage) Context() *Context { return d.Decl.Context() } +func (d DefEnum) Context() *Context { return d.Decl.Context() } +func (d DefService) Context() *Context { return d.Decl.Context() } +func (d DefExtend) Context() *Context { return d.Decl.Context() } +func (d DefField) Context() *Context { return d.Decl.Context() } +func (d DefEnumValue) Context() *Context { return d.Decl.Context() } +func (d DefOneof) Context() *Context { return d.Decl.Context() } +func (d DefGroup) Context() *Context { return d.Decl.Context() } +func (d DefMethod) Context() *Context { return d.Decl.Context() } +func (d DefOption) Context() *Context { return d.Decl.Context() } diff --git a/experimental/ast/decl_file.go b/experimental/ast/decl_file.go new file mode 100644 index 00000000..9c141778 --- /dev/null +++ b/experimental/ast/decl_file.go @@ -0,0 +1,311 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import "github.com/bufbuild/protocompile/internal/arena" + +// File is the top-level AST node for a Protobuf file. +// +// A file is a list of declarations (in other words, it is a [DeclScope]). The File type provides +// convenience functions for extracting salient elements, such as the [DeclSyntax] and the [DeclPackage]. +type File struct { + DeclScope +} + +// Syntax returns this file's pragma, if it has one. +func (f File) Syntax() (syntax DeclSyntax) { + Decls[DeclSyntax](f.DeclScope)(func(_ int, d DeclSyntax) bool { + syntax = d + return false + }) + return +} + +// Package returns this file's package declaration, if it has one. +func (f File) Package() (pkg DeclPackage) { + Decls[DeclPackage](f.DeclScope)(func(_ int, d DeclPackage) bool { + pkg = d + return false + }) + return +} + +// Imports returns an iterator over this file's import declarations. +func (f File) Imports() func(func(int, DeclImport) bool) { + return Decls[DeclImport](f.DeclScope) +} + +// DeclSyntax represents a language pragma, such as the syntax or edition keywords. 
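// An editorial sketch of reading the File accessors above for some File "f"
// returned by Parse; the iterator is invoked directly because it is a plain
// function value, and Expr is assumed to carry a Span as other nodes do:
//
//	f.Imports()(func(i int, imp ast.DeclImport) bool {
//		if path := imp.ImportPath(); path != nil {
//			fmt.Println("import", i, path.Span().Text(), "public:", imp.IsPublic())
//		}
//		return true // keep iterating
//	})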
+type DeclSyntax struct { + withContext + + ptr arena.Untyped + raw *rawDeclSyntax +} + +type rawDeclSyntax struct { + keyword, equals, semi rawToken + value rawExpr + options arena.Pointer[rawCompactOptions] +} + +// DeclSyntaxArgs is arguments for [Context.NewDeclSyntax]. +type DeclSyntaxArgs struct { + // Must be "syntax" or "edition". + Keyword Token + Equals Token + Value Expr + Options CompactOptions + Semicolon Token +} + +var _ Decl = DeclSyntax{} + +// Keyword returns the keyword for this pragma. +func (d DeclSyntax) Keyword() Token { + return d.raw.keyword.With(d) +} + +// IsSyntax checks whether this is an OG syntax pragma. +func (d DeclSyntax) IsSyntax() bool { + return d.Keyword().Text() == "syntax" +} + +// IsEdition checks whether this is a new-style edition pragma. +func (d DeclSyntax) IsEdition() bool { + return d.Keyword().Text() == "edition" +} + +// Equals returns the equals sign after the keyword. +// +// May be nil, if the user wrote something like syntax "proto2";. +func (d DeclSyntax) Equals() Token { + return d.raw.equals.With(d) +} + +// Value returns the value expression of this pragma. +// +// May be nil, if the user wrote something like syntax;. It can also be +// a number or an identifier, for cases like edition = 2024; or syntax = proto2;. +func (d DeclSyntax) Value() Expr { + return d.raw.value.With(d) +} + +// SetValue sets the expression for this pragma's value. +// +// If passed nil, this clears the value (e.g., for syntax = ;) +func (d DeclSyntax) SetValue(expr Expr) { + d.raw.value = toRawExpr(expr) +} + +// Options returns the compact options list for this declaration. +// +// Syntax declarations cannot have options, but we parse them anyways. +func (d DeclSyntax) Options() CompactOptions { + return newOptions(d.raw.options, d) +} + +// SetOptions sets the compact options list for this declaration. +// +// Setting it to a nil Options clears it. +func (d DeclSyntax) SetOptions(opts CompactOptions) { + d.raw.options = opts.ptr +} + +// Semicolon returns this pragma's ending semicolon. +// +// May be nil, if the user forgot it. +func (d DeclSyntax) Semicolon() Token { + return d.raw.semi.With(d) +} + +// Span implements [Spanner] for Message. +func (d DeclSyntax) Span() Span { + return JoinSpans(d.Keyword(), d.Equals(), d.Value(), d.Semicolon()) +} + +func (DeclSyntax) with(ctx *Context, ptr arena.Untyped) Decl { + return DeclSyntax{withContext{ctx}, ptr, ctx.decls.syntaxes.At(ptr)} +} + +func (d DeclSyntax) declIndex() arena.Untyped { + return d.ptr +} + +// DeclPackage is the package declaration for a file. +type DeclPackage struct { + withContext + + ptr arena.Untyped + raw *rawDeclPackage +} + +type rawDeclPackage struct { + keyword rawToken + path rawPath + semi rawToken + options arena.Pointer[rawCompactOptions] +} + +// DeclPackageArgs is arguments for [Context.NewDeclPackage]. +type DeclPackageArgs struct { + Keyword Token + Path Path + Options CompactOptions + Semicolon Token +} + +var _ Decl = DeclPackage{} + +// Keyword returns the "package" keyword for this declaration. +func (d DeclPackage) Keyword() Token { + return d.raw.keyword.With(d) +} + +// Path returns this package's path. +// +// May be nil, if the user wrote something like package;. +func (d DeclPackage) Path() Path { + return d.raw.path.With(d) +} + +// Options returns the compact options list for this declaration. +// +// Package declarations cannot have options, but we parse them anyways. 
+func (d DeclPackage) Options() CompactOptions { + return newOptions(d.raw.options, d) +} + +// SetOptions sets the compact options list for this declaration. +// +// Setting it to a nil Options clears it. +func (d DeclPackage) SetOptions(opts CompactOptions) { + d.raw.options = opts.ptr +} + +// Semicolon returns this package's ending semicolon. +// +// May be nil, if the user forgot it. +func (d DeclPackage) Semicolon() Token { + return d.raw.semi.With(d) +} + +// Span implements [Spanner] for DeclPackage. +func (d DeclPackage) Span() Span { + return JoinSpans(d.Keyword(), d.Path(), d.Semicolon()) +} + +func (DeclPackage) with(ctx *Context, ptr arena.Untyped) Decl { + return DeclPackage{withContext{ctx}, ptr, ctx.decls.packages.At(ptr)} +} + +func (d DeclPackage) declIndex() arena.Untyped { + return d.ptr +} + +// DeclImport is an import declaration within a file. +type DeclImport struct { + withContext + + ptr arena.Untyped + raw *rawDeclImport +} + +type rawDeclImport struct { + keyword, modifier, semi rawToken + importPath rawExpr + options arena.Pointer[rawCompactOptions] +} + +// DeclImportArgs is arguments for [Context.NewDeclImport]. +type DeclImportArgs struct { + Keyword Token + Modifier Token + ImportPath Expr + Options CompactOptions + Semicolon Token +} + +var _ Decl = DeclImport{} + +// Keyword returns the "import" keyword for this pragma. +func (d DeclImport) Keyword() Token { + return d.raw.keyword.With(d) +} + +// Keyword returns the modifier keyword for this pragma. +// +// May be nil if there is no modifier. +func (d DeclImport) Modifier() Token { + return d.raw.modifier.With(d) +} + +// IsSyntax checks whether this is an "import public" +func (d DeclImport) IsPublic() bool { + return d.Modifier().Text() == "public" +} + +// IsEdition checks whether this is an "import weak". +func (d DeclImport) IsWeak() bool { + return d.Modifier().Text() == "weak" +} + +// ImportPath returns the file path for this import as a string. +// +// May be nil, if the user forgot it. +func (d DeclImport) ImportPath() Expr { + return d.raw.importPath.With(d) +} + +// SetValue sets the expression for this import's file path. +// +// If passed nil, this clears the path expression. +func (d DeclImport) SetImportPath(expr Expr) { + d.raw.importPath = toRawExpr(expr) +} + +// Options returns the compact options list for this declaration. +// +// Imports cannot have options, but we parse them anyways. +func (d DeclImport) Options() CompactOptions { + return newOptions(d.raw.options, d) +} + +// SetOptions sets the compact options list for this declaration. +// +// Setting it to a nil Options clears it. +func (d DeclImport) SetOptions(opts CompactOptions) { + d.raw.options = opts.ptr +} + +// Semicolon returns this import's ending semicolon. +// +// May be nil, if the user forgot it. +func (d DeclImport) Semicolon() Token { + return d.raw.semi.With(d) +} + +// Span implements [Spanner] for DeclImport. +func (d DeclImport) Span() Span { + return JoinSpans(d.Keyword(), d.Modifier(), d.ImportPath(), d.Semicolon()) +} + +func (DeclImport) with(ctx *Context, ptr arena.Untyped) Decl { + return DeclImport{withContext{ctx}, ptr, ctx.decls.imports.At(ptr)} +} + +func (d DeclImport) declIndex() arena.Untyped { + return d.ptr +} diff --git a/experimental/ast/decl_range.go b/experimental/ast/decl_range.go new file mode 100644 index 00000000..1ad4f28d --- /dev/null +++ b/experimental/ast/decl_range.go @@ -0,0 +1,161 @@ +// Copyright 2020-2024 Buf Technologies, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "slices" + + "github.com/bufbuild/protocompile/internal/arena" +) + +// DeclRange represents an extension or reserved range declaration. They are almost identical +// syntactically so they use the same AST node. +// +// In the Protocompile AST, ranges can contain arbitrary expressions. Thus, DeclRange +// implements [Comma[Expr]]. +type DeclRange struct { + withContext + + ptr arena.Untyped + raw *rawDeclRange +} + +type rawDeclRange struct { + keyword rawToken + args []struct { + expr rawExpr + comma rawToken + } + options arena.Pointer[rawCompactOptions] + semi rawToken +} + +// DeclRangeArgs is arguments for [Context.NewDeclRange]. +type DeclRangeArgs struct { + Keyword Token + Options CompactOptions + Semicolon Token +} + +var ( + _ Decl = DeclRange{} + _ Commas[Expr] = DeclRange{} +) + +// Keyword returns the keyword for this range. +func (d DeclRange) Keyword() Token { + return d.raw.keyword.With(d) +} + +// IsExtensions checks whether this is an extension range. +func (d DeclRange) IsExtensions() bool { + return d.Keyword().Text() == "extensions" +} + +// IsReserved checks whether this is a reserved range. +func (d DeclRange) IsReserved() bool { + return d.Keyword().Text() == "reserved" +} + +// Len implements [Slice] for Extensions. +func (d DeclRange) Len() int { + return len(d.raw.args) +} + +// At implements [Slice] for Range. +func (d DeclRange) At(n int) Expr { + return d.raw.args[n].expr.With(d) +} + +// Iter implements [Slice] for Range. +func (d DeclRange) Iter(yield func(int, Expr) bool) { + for i, arg := range d.raw.args { + if !yield(i, arg.expr.With(d)) { + break + + } + } +} + +// Append implements [Inserter] for Range. +func (d DeclRange) Append(expr Expr) { + d.InsertComma(d.Len(), expr, Token{}) +} + +// Insert implements [Inserter] for Range. +func (d DeclRange) Insert(n int, expr Expr) { + d.InsertComma(n, expr, Token{}) +} + +// Delete implements [Inserter] for Range. +func (d DeclRange) Delete(n int) { + d.raw.args = slices.Delete(d.raw.args, n, n+1) +} + +// Comma implements [Commas] for Range. +func (d DeclRange) Comma(n int) Token { + return d.raw.args[n].comma.With(d) +} + +// AppendComma implements [Commas] for Range. +func (d DeclRange) AppendComma(expr Expr, comma Token) { + d.InsertComma(d.Len(), expr, comma) +} + +// InsertComma implements [Commas] for Range. +func (d DeclRange) InsertComma(n int, expr Expr, comma Token) { + d.Context().panicIfNotOurs(expr, comma) + + d.raw.args = slices.Insert(d.raw.args, n, struct { + expr rawExpr + comma rawToken + }{toRawExpr(expr), comma.raw}) +} + +// Options returns the compact options list for this range. +func (d DeclRange) Options() CompactOptions { + return newOptions(d.raw.options, d) +} + +// SetOptions sets the compact options list for this definition. +// +// Setting it to a nil Options clears it. 
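// An editorial sketch of walking a parsed range declaration "rng" through the
// Commas[Expr] surface defined above (Expr is assumed to carry a Span, as the
// other node types do):
//
//	kind := "extensions"
//	if rng.IsReserved() {
//		kind = "reserved"
//	}
//	fmt.Println(kind, "range with", rng.Len(), "entries")
//	rng.Iter(func(i int, e ast.Expr) bool {
//		fmt.Println(" ", e.Span().Text(), "trailing comma:", !rng.Comma(i).Nil())
//		return true
//	})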
+func (d DeclRange) SetOptions(opts CompactOptions) { + d.raw.options = opts.ptr +} + +// Semicolon returns this range's ending semicolon. +// +// May be nil, if not present. +func (d DeclRange) Semicolon() Token { + return d.raw.semi.With(d) +} + +// Span implements [Spanner] for Range. +func (d DeclRange) Span() Span { + span := JoinSpans(d.Keyword(), d.Semicolon(), d.Options()) + for _, arg := range d.raw.args { + span = JoinSpans(span, arg.expr.With(d), arg.comma.With(d)) + } + return span +} + +func (DeclRange) with(ctx *Context, ptr arena.Untyped) Decl { + return DeclRange{withContext{ctx}, ptr, ctx.decls.ranges.At(ptr)} +} + +func (d DeclRange) declIndex() arena.Untyped { + return d.ptr +} diff --git a/experimental/ast/decl_scope.go b/experimental/ast/decl_scope.go new file mode 100644 index 00000000..a1fd6b9d --- /dev/null +++ b/experimental/ast/decl_scope.go @@ -0,0 +1,130 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "slices" + + "github.com/bufbuild/protocompile/internal/arena" +) + +// DeclScope is the body of a [DeclDef], or the whole contents of a [File]. The +// protocompile AST is very lenient, and allows any declaration to exist anywhere, for the +// benefit of rich diagnostics and refactorings. For example, it is possible to represent an +// "orphaned" field or oneof outside of a message, or an RPC method inside of an enum, and +// so on. +// +// DeclScope implements [Slice], providing access to its declarations. +type DeclScope struct { + withContext + + ptr arena.Untyped + raw *rawDeclScope +} + +type rawDeclScope struct { + braces rawToken + + // These slices are co-indexed; they are parallelizes to save + // three bytes per decl (declKind is 1 byte, but decl is 4; if + // they're stored in AOS format, we waste 3 bytes of padding). + kinds []declKind + ptrs []arena.Untyped +} + +var ( + _ Decl = DeclScope{} + _ Inserter[Decl] = DeclScope{} +) + +// Braces returns this body's surrounding braces, if it has any. +func (d DeclScope) Braces() Token { + return d.raw.braces.With(d) +} + +// Span implements [Spanner] for Body. +func (d DeclScope) Span() Span { + if !d.Braces().Nil() { + return d.Braces().Span() + } + + if d.Len() == 0 { + return Span{} + } + + return JoinSpans(d.At(0), d.At(d.Len()-1)) +} + +// Len returns the number of declarations inside of this body. +func (d DeclScope) Len() int { + return len(d.raw.ptrs) +} + +// At returns the nth element of this body. +func (d DeclScope) At(n int) Decl { + return d.raw.kinds[n].reify().with(d.Context(), d.raw.ptrs[n]) +} + +// Iter is an iterator over the nodes inside this body. +func (d DeclScope) Iter(yield func(int, Decl) bool) { + for i := range d.raw.kinds { + if !yield(i, d.At(i)) { + break + } + } +} + +// Append appends a new declaration to this body. +func (d DeclScope) Append(value Decl) { + d.Insert(d.Len(), value) +} + +// Insert inserts a new declaration at the given index. 
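// An editorial sketch of walking a body: Iter visits every declaration, while
// the Decls helper below filters by concrete type ("body" is some DeclScope):
//
//	body.Iter(func(i int, decl ast.Decl) bool {
//		fmt.Printf("%d: %T\n", i, decl)
//		return true
//	})
//	ast.Decls[ast.DeclDef](body)(func(i int, def ast.DeclDef) bool {
//		fmt.Println("definition", i, "introduced by", def.Keyword().Text())
//		return true
//	})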
+func (d DeclScope) Insert(n int, value Decl) { + d.Context().panicIfNotOurs(value) + + d.raw.kinds = slices.Insert(d.raw.kinds, n, value.declKind()) + d.raw.ptrs = slices.Insert(d.raw.ptrs, n, value.declIndex()) +} + +// Delete deletes the declaration at the given index. +func (d DeclScope) Delete(n int) { + d.raw.kinds = slices.Delete(d.raw.kinds, n, n+1) + d.raw.ptrs = slices.Delete(d.raw.ptrs, n, n+1) +} + +func (d DeclScope) declIndex() arena.Untyped { + return d.ptr +} + +// Decls returns an iterator over the nodes within a body of a particular type. +func Decls[T Decl](d DeclScope) func(func(int, T) bool) { + return func(yield func(int, T) bool) { + var idx int + d.Iter(func(_ int, decl Decl) bool { + if actual, ok := decl.(T); ok { + if !yield(idx, actual) { + return false + } + idx++ + } + return true + }) + } +} + +func (DeclScope) with(ctx *Context, ptr arena.Untyped) Decl { + return DeclScope{withContext{ctx}, ptr, ctx.decls.bodies.At(ptr)} +} diff --git a/experimental/ast/diagnostics.go b/experimental/ast/diagnostics.go new file mode 100644 index 00000000..1acf1b1b --- /dev/null +++ b/experimental/ast/diagnostics.go @@ -0,0 +1,575 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + "math" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "github.com/bufbuild/protocompile/experimental/report" +) + +const MaxFileSize int = math.MaxInt32 + +// ErrFileTooBig diagnoses a file that is beyond Protocompile's implementation limits. +type ErrFileTooBig struct { + Path string +} + +func (e ErrFileTooBig) Error() string { + return "files larger than 2GB are not supported" +} + +func (e ErrFileTooBig) Diagnose(d *report.Diagnostic) { + d.With(report.InFile(e.Path)) +} + +// ErrNotUTF8 diagnoses a file that contains non-UTF-8 bytes. +type ErrNotUTF8 struct { + Path string + At int + Byte byte +} + +func (e ErrNotUTF8) Error() string { + return "files must be encoded as valid UTF-8" +} + +func (e ErrNotUTF8) Diagnose(d *report.Diagnostic) { + d.With( + report.InFile(e.Path), + report.Notef("found a 0x%02x byte at offset %d", e.Byte, e.At), + ) +} + +// ErrUnrecognized diagnoses the presence of an unrecognized token. +type ErrUnrecognized struct{ Token Token } + +func (e ErrUnrecognized) Error() string { + return "unrecongnized token" +} + +func (e ErrUnrecognized) Diagnose(d *report.Diagnostic) { + d.With(report.Snippet(e.Token)) +} + +// ErrUnterminated diagnoses a delimiter for which we found one half of a matched +// delimiter but not the other. +type ErrUnterminated struct { + Span Span + + // If present, this indicates that we did match with another brace delimiter, but it + // was of the wrong kind + Mismatch Span +} + +// OpenClose returns the expected open/close delimiters for this matched pair. 
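// The Err* types in this file all follow one pattern: a plain error plus a
// Diagnose hook that decorates a report.Diagnostic. An editorial sketch of
// that shape (ErrExample is hypothetical, and this assumes the report package
// accepts such values the same way it accepts the types in this file):
//
//	type ErrExample struct{ Token ast.Token }
//
//	func (e ErrExample) Error() string { return "example diagnostic" }
//
//	func (e ErrExample) Diagnose(d *report.Diagnostic) {
//		d.With(
//			report.Snippetf(e.Token, "reported here"),
//			report.Help("this is only an illustration"),
//		)
//	}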
+func (e ErrUnterminated) OpenClose() (string, string) { + switch t := e.Span.Text(); t { + case "(", ")": + return "(", ")" + case "[", "]": + return "[", "]" + case "{", "}": + return "{", "}" + case "<", ">": + return "<", ">" + case "/*", "*/": + return "/*", "*/" + default: + start, end := e.Span.Offsets() + panic(fmt.Sprintf("protocompile/ast: invalid token in ErrUnterminated: %q (byte offset %d:%d)", t, start, end)) + } +} + +func (e ErrUnterminated) Error() string { + return fmt.Sprintf("encountered unterminated `%s` delimiter", e.Span.Text()) +} + +func (e ErrUnterminated) Diagnose(d *report.Diagnostic) { + text := e.Span.Text() + open, close := e.OpenClose() + + if text == open { + d.With(report.Snippetf(e.Span, "expected to be closed by `%s`", close)) + if !e.Mismatch.Nil() { + d.With(report.Snippetf(e.Mismatch, "closed by this instead")) + } + } else { + d.With(report.Snippetf(e.Span, "expected to be opened by `%s`", open)) + } + if text == "*/" { + d.With(report.Note("Protobuf does not support nested block comments")) + } +} + +// ErrUnterminatedStringLiteral diagnoses a string literal that continues to EOF. +type ErrUnterminatedStringLiteral struct{ Token Token } + +func (e ErrUnterminatedStringLiteral) Error() string { + return "unterminated string literal" +} + +func (e ErrUnterminatedStringLiteral) Diagnose(d *report.Diagnostic) { + open := e.Token.Text()[:1] + d.With(report.Snippetf(e.Token, "expected to be terminated by `%s`", open)) + // TODO: check to see if a " or ' escape exists in the string? +} + +// ErrInvalidEscape diagnoses an invalid escape sequence. +type ErrInvalidEscape struct{ Span Span } + +func (e ErrInvalidEscape) Error() string { + return "invalid escape sequence" +} + +func (e ErrInvalidEscape) Diagnose(d *report.Diagnostic) { + text := e.Span.Text() + + if len(text) >= 2 { + switch c := text[1]; c { + case 'x': + if len(text) < 3 { + d.With(report.Snippetf(e.Span, "`\\x` must be followed by at least one hex digit")) + return + } + case 'u', 'U': + expected := 4 + if c == 'U' { + expected = 8 + } + + if len(text[2:]) != expected { + d.With(report.Snippetf(e.Span, "`\\%c` must be followed by exactly %d hex digits", c, expected)) + return + } + + value, _ := strconv.ParseUint(text[2:], 16, 32) + if !utf8.ValidRune(rune(value)) { + d.With(report.Snippetf(e.Span, "must be in the range U+0000 to U+10FFFF, except U+DC00 to U+DFFF")) + return + } + } + } + + d.With(report.Snippet(e.Span)) +} + +// ErrNonASCIIIdent diagnoses an identifier that contains non-ASCII runes. +type ErrNonASCIIIdent struct{ Token Token } + +func (e ErrNonASCIIIdent) Error() string { + return "non-ASCII identifiers are not allowed" +} + +func (e ErrNonASCIIIdent) Diagnose(d *report.Diagnostic) { + d.With(report.Snippet(e.Token)) +} + +// ErrIntegerOverflow diagnoses an integer literal that does not fit into the required range. +type ErrIntegerOverflow struct { + Token Token + // TODO: Extend this to other bit-sizes.
+} + +func (e ErrIntegerOverflow) Error() string { + return "integer literal out of range" +} + +func (e ErrIntegerOverflow) Diagnose(d *report.Diagnostic) { + text := strings.ToLower(e.Token.Text()) + switch { + case strings.HasPrefix(text, "0x"): + d.With( + report.Snippetf(e.Token, "must be in the range `0x0` to `0x%x`", uint64(math.MaxUint64)), + report.Note("hexadecimal literals must always fit in a uint64"), + ) + case strings.HasPrefix(text, "0b"): + d.With( + report.Snippetf(e.Token, "must be in the range `0b0` to `0b%b`", uint64(math.MaxUint64)), + report.Note("binary literals must always fit in a uint64"), + ) + default: + // NB: Decimal literals cannot overflow, at least in the lexer. + d.With( + report.Snippetf(e.Token, "must be in the range `0` to `0%o`", uint64(math.MaxUint64)), + report.Note("octal literals must always fit in a uint64"), + ) + } +} + +// ErrInvalidNumber diagnoses a numeric literal with invalid syntax. +type ErrInvalidNumber struct { + Token Token +} + +func (e ErrInvalidNumber) Error() string { + if strings.ContainsRune(e.Token.Text(), '.') { + return "invalid floating-point literal" + } + + return "invalid integer literal" +} + +func (e ErrInvalidNumber) Diagnose(d *report.Diagnostic) { + text := strings.ToLower(e.Token.Text()) + d.With(report.Snippet(e.Token)) + if strings.ContainsRune(e.Token.Text(), '.') && strings.HasPrefix(text, "0x") { + d.With(report.Note("Protobuf does not support binary floats")) + } +} + +// ErrExpectedIdent diagnoses a node that needs to be a single identifier. +type ErrExpectedIdent struct { + Name, Prior, Named Spanner +} + +func (e ErrExpectedIdent) Error() string { + return fmt.Sprintf("the name of this %s must be a single identifier", describe(e.Named)) +} + +func (e ErrExpectedIdent) Diagnose(d *report.Diagnostic) { + d.With(report.Snippet(e.Name)) + + if e.Prior != nil { + d.With(report.Snippetf(e.Prior, "this must be followed by an identifier")) + } +} + +// ErrNoSyntax diagnoses a missing syntax declaration in a file. +type ErrNoSyntax struct { + Path string +} + +func (e ErrNoSyntax) Error() string { + return "missing syntax declaration" +} + +func (e ErrNoSyntax) Diagnose(d *report.Diagnostic) { + d.With( + report.InFile(e.Path), + report.Note("omitting the `syntax` keyword implies \"proto2\" by default"), + report.Help("explicitly add `syntax = \"proto2\";` at the top of the file"), + ) +} + +// ErrUnknownSyntax diagnoses a [DeclSyntax] with an unknown value. +type ErrUnknownSyntax struct { + Node DeclSyntax + Value Token +} + +func (e ErrUnknownSyntax) Error() string { + value, _ := e.Value.AsString() + if len(value) > 16 { + value = value[:16] + "..." + } + + return fmt.Sprintf("%q is not a valid %s", value, e.Node.Keyword().Text()) +} + +func (e ErrUnknownSyntax) Diagnose(d *report.Diagnostic) { + d.With(report.Snippet(e.Value)) + + var values []string + switch { + case e.Node.IsSyntax(): + values = knownSyntaxes + case e.Node.IsEdition(): + values = knownEditions + } + + if len(values) > 0 { + var list strings.Builder + for i, value := range values { + if i != 0 { + list.WriteString(", ") + } + list.WriteRune('"') + list.WriteString(value) + list.WriteRune('"') + } + + d.With(report.Notef("protocompile only recognizes %v", list.String())) + } +} + +// ErrNoPackage diagnoses a missing package declaration in a file. 
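//
// Like the other Err* types in this file, it is surfaced by handing the value
// to a report; a sketch of the calling convention used elsewhere in this
// package (the *report.Report value `r` is assumed to exist already):
//
//	r.Warn(ErrNoPackage{Path: "foo.proto"})
//	// The report renders Error() as the headline and calls Diagnose()
//	// to attach the file, notes, and help text.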
+type ErrNoPackage struct { + Path string +} + +func (e ErrNoPackage) Error() string { + return "missing package declaration" +} + +func (e ErrNoPackage) Diagnose(d *report.Diagnostic) { + d.With( + report.InFile(e.Path), + report.Note("omitting the `package` keyword implies an empty package"), + report.Help("using the empty package is discouraged"), + report.Help("explicitly add `package ...;` at the top of the file, after the syntax declaration"), + ) +} + +// ErrMoreThanOne indicates that some portion of a production appeared more than once, +// even though it should occur at most once. +type ErrMoreThanOne struct { + First, Second Spanner + what string +} + +func (e ErrMoreThanOne) Error() string { + return "encountered more than one " + e.what +} + +func (e ErrMoreThanOne) Diagnose(d *report.Diagnostic) { + d.With( + report.Snippet(e.Second), + report.Snippetf(e.First, "first one is here"), + ) +} + +// ErrInvalidChild diagnoses a declaration that should not occur inside of another. +type ErrInvalidChild struct { + Parent, Decl Spanner +} + +func (e ErrInvalidChild) Error() string { + return fmt.Sprintf("unexpected %s inside %s", describe(e.Decl), describe(e.Parent)) +} + +func (e ErrInvalidChild) Diagnose(d *report.Diagnostic) { + d.With( + report.Snippet(e.Decl), + report.Snippetf(e.Parent, "inside this %s", describe(e.Parent)), + ) + + switch e.Decl.(type) { + case DeclSyntax, DeclPackage, DeclImport: + d.With(report.Help("perhaps you meant to place this in the top-level scope?")) + } +} + +// ErrDuplicateImport indicates that a particular import occurred twice. +type ErrDuplicateImport struct { + First, Second DeclImport + Path string +} + +func (e ErrDuplicateImport) Error() string { + return fmt.Sprintf("%q imported twice", e.Path) +} + +func (e ErrDuplicateImport) Diagnose(d *report.Diagnostic) { + d.With( + report.Snippetf(e.Second, "help: remove this import"), + report.Snippetf(e.First, "previously imported here"), + ) +} + +// ** PRIVATE ** // + +// errUnexpected is a low-level parser error for when we hit a token we don't +// know how to handle. +type errUnexpected struct { + node Spanner + where string + want []string + got string +} + +// errEOF constructs an errUnexpected for an exhausted cursor. +func errEOF(cursor *Cursor, where string, want []string) errUnexpected { + if cursor.stream != nil { + panic("protocompile/ast: passed synthetic cursor to internal function errEOF(); this is a bug in protocompile") + } + + // Generating a nice error depends on whether this cursor points past the + // end of the stream array or not. + ctx := cursor.Context() + next := cursor.start + if int(next) > len(ctx.stream) { + // Find the last non-space rune; we moor the span immediately after it. + eof := strings.LastIndexFunc(ctx.Text(), func(r rune) bool { return !unicode.IsSpace(r) }) + if eof == -1 { + eof = 0 // The whole file is whitespace. 
+ } + + return errUnexpected{ + node: ctx.NewSpan(eof+1, eof+1), + where: where, + want: want, + got: "end-of-file", + } + } + + return errUnexpected{ + node: next.With(ctx), + where: where, + want: want, + } +} + +func (e errUnexpected) Error() string { + if e.got == "" { + e.got = describe(e.node) + } + if e.where != "" { + return fmt.Sprintf("unexpected %s %s", e.got, e.where) + } + return fmt.Sprintf("unexpected %s", e.got) +} + +func (e errUnexpected) Diagnose(d *report.Diagnostic) { + var buf strings.Builder + switch len(e.want) { + case 0: + case 1: + fmt.Fprintf(&buf, "expected %s", e.want[0]) + case 2: + fmt.Fprintf(&buf, "expected %s or %s", e.want[0], e.want[1]) + default: + buf.WriteString("expected ") + for _, want := range e.want[:len(e.want)-1] { + fmt.Fprintf(&buf, "%s, ", want) + } + fmt.Fprintf(&buf, "or %s", e.want[len(e.want)-1]) + } + + d.With(report.Snippetf(e.node, "%s", buf.String())) +} + +// describe attempts to generate a user-friendly name for `node`. +func describe(node Spanner) string { + switch node := node.(type) { + case File: + return "top-level scope" + case Path, ExprPath, TypePath: + return "path" + case DeclSyntax: + if node.IsEdition() { + return "edition declaration" + } + return "syntax declaration" + case DeclPackage: + return "package declaration" + case DeclImport: + switch { + case node.IsWeak(): + return "weak import" + case node.IsPublic(): + return "public import" + default: + return "import" + } + case DeclRange: + switch { + case node.IsExtensions(): + return "extension range" + case node.IsReserved(): + return "reserved range" + default: + return "range declaration" + } + case DeclDef: + switch def := node.Classify().(type) { + case DefMessage: + return "message definition" + case DefEnum: + return "enum definition" + case DefService: + return "service definition" + case DefExtend: + return "extension declaration" + case DefOption: + var first PathComponent + def.Path.Components(func(pc PathComponent) bool { + first = pc + return false + }) + if first.IsExtension() { + return "custom option setting" + } else { + return "option setting" + } + case DefField, DefGroup: + return "message field" + case DefEnumValue: + return "enum value" + case DefMethod: + return "service method" + case DefOneof: + return "oneof definition" + default: + return "invalid definition" + } + case DeclEmpty: + return "empty declaration" + case DeclScope: + return "definition body" + case Decl: + return "declaration" + case ExprLiteral: + return describe(node.Token) + case ExprPrefixed: + if lit, ok := node.Expr().(ExprLiteral); ok && lit.Token.Kind() == TokenNumber && + node.Prefix() == ExprPrefixMinus { + return describe(lit.Token) + } + return "expression" + case Expr: + return "expression" + case Type: + return "type" + case CompactOptions: + return "compact options" + case Token: + switch node.Kind() { + case TokenSpace: + return "whitespace" + case TokenComment: + return "comment" + case TokenIdent: + if name := node.Text(); keywords[name] { + return fmt.Sprintf("`%s`", name) + } + return "identifier" + case TokenString: + return "string literal" + case TokenNumber: + if strings.ContainsRune(node.Text(), '.') { + return "floating-point literal" + } + + return "integer literal" + case TokenPunct: + if !node.IsLeaf() { + start, end := node.StartEnd() + return fmt.Sprintf("`%s...%s`", start.Text(), end.Text()) + } + + fallthrough + default: + return fmt.Sprintf("`%s`", node.Text()) + } + } + + return "" +} diff --git a/experimental/ast/doc.go 
b/experimental/ast/doc.go new file mode 100644 index 00000000..b54e5199 --- /dev/null +++ b/experimental/ast/doc.go @@ -0,0 +1,36 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package ast2 is a high-performance implementation of a Protobuf IDL abstract +// syntax tree. It is intended to be end-all, be-all AST for the following use-cases: +// +// 1. Parse target for a Protobuf compiler. +// +// 2. Incremental, fault-tolerant parsing for a Protobuf language server. +// +// 3. In-place rewriting of the AST to implement refactoring tools. +// +// 4. Formatting, even of partially-invalid code. +// +// "High-performance" means that the AST is optimized to minimize resident memory use, +// pointer nesting (to minimize GC pause contribution), and maximize locality. This AST +// is suitable for hydrating large Buf modules in a long-lived process without exhausting +// memory or introducing unreasonable GC latency. +// +// In general, if an API tradeoff is necessary for satisfying any of the above goals, we +// make the tradeoff, but we attempt to maintain a high degree of ease of use. +// +// This library will replace the existing [github.com/bufbuild/protocompile/ast] library. +// Outside of this file, documentation is written assuming this has already happened. +package ast diff --git a/experimental/ast/expr.go b/experimental/ast/expr.go new file mode 100644 index 00000000..d4ccd406 --- /dev/null +++ b/experimental/ast/expr.go @@ -0,0 +1,678 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "math" + "slices" + + "github.com/bufbuild/protocompile/internal/arena" +) + +const ( + exprLiteral exprKind = iota + 1 + exprPrefixed + exprPath + exprRange + exprArray + exprDict + exprField + expr +) + +const ( + ExprPrefixUnknown ExprPrefix = iota + ExprPrefixMinus +) + +// TypePrefix is a prefix for an expression, such as a minus sign. +type ExprPrefix int8 + +// ExprPrefixByName looks up a prefix kind by name. +// +// If name is not a known prefix, returns [ExprPrefixUnknown]. +func ExprPrefixByName(name string) ExprPrefix { + switch name { + case "-": + return ExprPrefixMinus + default: + return ExprPrefixUnknown + } +} + +// Expr is an expression, primarily occurring on the right hand side of an =. +// +// Expr provides methods for interpreting it as various Go types, as a shorthand +// for introspecting the concrete type of the expression. 
These methods return +// (T, bool), returning false if the expression cannot be interpreted as that +// type. The [Commas]-returning methods return nil instead. +// TODO: Return a diagnostic instead. +// +// This is implemented by types in this package of the form Expr*. +type Expr interface { + Spanner + + AsBool() (bool, bool) + AsInt32() (int32, bool) + AsInt64() (int64, bool) + AsUInt32() (uint32, bool) + AsUInt64() (uint64, bool) + AsFloat32() (float32, bool) + AsFloat64() (float64, bool) + AsString() (string, bool) + AsArray() Commas[Expr] + AsMessage() Commas[ExprKV] + + exprKind() exprKind + exprIndex() arena.Untyped +} + +// exprs is storage for the various kinds of Exprs in a Context. +type exprs struct { + prefixes arena.Arena[rawExprPrefixed] + ranges arena.Arena[rawExprRange] + arrays arena.Arena[rawExprArray] + dicts arena.Arena[rawExprDict] + fields arena.Arena[rawExprKV] +} + +func (ExprLiteral) exprKind() exprKind { return exprLiteral } +func (ExprPath) exprKind() exprKind { return exprPath } +func (ExprPrefixed) exprKind() exprKind { return exprPrefixed } +func (ExprRange) exprKind() exprKind { return exprRange } +func (ExprArray) exprKind() exprKind { return exprArray } +func (ExprDict) exprKind() exprKind { return exprDict } +func (ExprKV) exprKind() exprKind { return exprField } + +func (e ExprLiteral) exprIndex() arena.Untyped { return arena.Untyped(e.Token.raw) } +func (ExprPath) exprIndex() arena.Untyped { return 0 } +func (e ExprPrefixed) exprIndex() arena.Untyped { return e.ptr } +func (e ExprRange) exprIndex() arena.Untyped { return e.ptr } +func (e ExprArray) exprIndex() arena.Untyped { return e.ptr } +func (e ExprDict) exprIndex() arena.Untyped { return e.ptr } +func (e ExprKV) exprIndex() arena.Untyped { return e.ptr } + +// ExprLiteral is an expression corresponding to a string or number literal. +type ExprLiteral struct { + baseExpr + + // The token backing this expression. Must be [TokenString] or [TokenNumber]. + Token +} + +var _ Expr = ExprLiteral{} + +// AsInt32 implements [Expr] for ExprLiteral. +func (e ExprLiteral) AsInt32() (int32, bool) { + n, ok := e.Token.AsInt() + return int32(n), ok && n <= uint64(math.MaxInt32) +} + +// AsInt64 implements [Expr] for ExprLiteral. +func (e ExprLiteral) AsInt64() (int64, bool) { + n, ok := e.Token.AsInt() + return int64(n), ok && n <= uint64(math.MaxInt64) +} + +// AsUInt32 implements [Expr] for ExprLiteral. +func (e ExprLiteral) AsUInt32() (uint32, bool) { + n, ok := e.Token.AsInt() + return uint32(n), ok && n <= uint64(math.MaxUint32) +} + +// AsUInt64 implements [Expr] for ExprLiteral. +func (e ExprLiteral) AsUInt64() (uint64, bool) { + return e.Token.AsInt() +} + +// AsFloat32 implements [Expr] for ExprLiteral. +func (e ExprLiteral) AsFloat32() (float32, bool) { + n, ok := e.Token.AsFloat() + return float32(n), ok // Loss of precision is intentional. +} + +// AsFloat64 implements [Expr] for ExprLiteral. +func (e ExprLiteral) AsFloat64() (float64, bool) { + return e.Token.AsFloat() +} + +// AsString implements [Expr] for ExprLiteral. +func (e ExprLiteral) AsString() (string, bool) { + return e.Token.AsString() +} + +// ExprPath is a Protobuf path in expression position. +// +// Note: if this is BuiltinMax, +type ExprPath struct { + baseExpr + + // The path backing this expression. + Path +} + +var _ Expr = ExprPath{} + +// AsBool implements [Expr] for ExprPath. 
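//
// A sketch of the caller-side pattern (the `expr` value is assumed to be an
// [Expr] pulled from an option value or a field default):
//
//	if b, ok := expr.AsBool(); ok {
//		_ = b // `true` or `false` written as a bare identifier
//	} else if f, ok := expr.AsFloat64(); ok {
//		_ = f // `inf` or `nan` written as a bare identifier
//	}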
+func (e ExprPath) AsBool() (bool, bool) { + switch e.AsIdent().Text() { + case "true": + return true, true + case "false": + return false, true + default: + return false, false + } +} + +// AsFloat32 implements [Expr] for ExprPath. +func (e ExprPath) AsFloat32() (float32, bool) { + n, ok := e.AsFloat64() + return float32(n), ok +} + +// AsFloat64 implements [Expr] for ExprPath. +func (e ExprPath) AsFloat64() (float64, bool) { + switch e.AsIdent().Text() { + case "inf": + return math.Inf(1), true + case "nan": + return math.NaN(), true + default: + return 0, false + } +} + +// ExprPrefixed is an expression prefixed with an operator. +type ExprPrefixed struct { + baseExpr + withContext + + ptr arena.Untyped + raw *rawExprPrefixed +} + +type rawExprPrefixed struct { + prefix rawToken + expr rawExpr +} + +// ExprPrefixedArgs is arguments for [Context.NewExprPrefixed]. +type ExprPrefixedArgs struct { + Prefix Token + Expr Expr +} + +var _ Expr = ExprPrefixed{} + +// Prefix returns this expression's prefix. +func (e ExprPrefixed) Prefix() ExprPrefix { + return ExprPrefixByName(e.PrefixToken().Text()) +} + +// PrefixToken returns the token representing this expression's prefix. +func (e ExprPrefixed) PrefixToken() Token { + return e.raw.prefix.With(e) +} + +// Expr returns the expression the prefix is applied to. +func (e ExprPrefixed) Expr() Expr { + return e.raw.expr.With(e) +} + +// SetExpr sets the expression that the prefix is applied to. +// +// If passed nil, this clears the expression. +func (e ExprPrefixed) SetExpr(expr Expr) { + e.raw.expr = toRawExpr(expr) +} + +// Span implements [Spanner] for ExprPrefixed. +func (e ExprPrefixed) Span() Span { + return JoinSpans(e.PrefixToken(), e.Expr()) +} + +// AsInt32 implements [Expr] for ExprPrefixed. +func (e ExprPrefixed) AsInt32() (int32, bool) { + n, ok := e.AsInt64() + if !ok || n < int64(math.MinInt32) || n > int64(math.MaxInt32) { + return 0, false + } + + return int32(n), true +} + +// AsInt64 implements [Expr] for ExprPrefixed. +func (e ExprPrefixed) AsInt64() (int64, bool) { + n, ok := e.Expr().AsInt64() + if ok && n != -n { + // If n == -n, that means n == MinInt64. + return -n, ok + } + + // Need to handle the funny case where someone wrote -9223372036854775808, since + // 9223372036854775808 is not representable as an int64. + u, ok := e.Expr().AsUInt64() + if ok && u == uint64(math.MaxInt64)+1 { + return math.MinInt64, true + } + + return 0, false +} + +// AsUInt32 implements [Expr] for ExprPrefixed. +func (e ExprPrefixed) AsUInt32() (uint32, bool) { + // NOTE: - is not treated as two's complement here; we only allow -0 + n, ok := e.Expr().AsUInt32() + return 0, ok && n == 0 +} + +// AsUInt64 implements [Expr] for ExprPrefixed. +func (e ExprPrefixed) AsUInt64() (uint64, bool) { + // NOTE: - is not treated as two's complement here; we only allow -0 + n, ok := e.Expr().AsUInt64() + return 0, ok && n == 0 +} + +// AsFloat32 implements [Expr] for ExprPrefixed. +func (e ExprPrefixed) AsFloat32() (float32, bool) { + n, ok := e.Expr().AsFloat32() + return -n, ok +} + +// AsFloat64 implements [Expr] for ExprPrefixed. +func (e ExprPrefixed) AsFloat64() (float64, bool) { + n, ok := e.Expr().AsFloat64() + return -n, ok +} + +// ExprRange represents a range of values, such as 1 to 4 or 5 to max. +// +// Note that max is not special syntax; it will appear as an [ExprPath] with the name "max".
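//
// For example, `extensions 1000 to max;` yields a range whose bounds are an
// [ExprLiteral] for 1000 and an [ExprPath] for `max`; a sketch of reading it
// back (the `rng` value is assumed):
//
//	lo, hi := rng.Bounds()
//	n, _ := lo.AsInt32() // 1000
//	if path, ok := hi.(ExprPath); ok && path.AsIdent().Name() == "max" {
//		// Open-ended range; legalize.go special-cases this spelling.
//	}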
+type ExprRange struct { + baseExpr + withContext + + ptr arena.Untyped + raw *rawExprRange +} + +type rawExprRange struct { + lo, hi rawExpr + to rawToken +} + +// ExprRangeArgs is arguments for [Context.NewExprRange] +type ExprRangeArgs struct { + Start Expr + To Token + End Expr +} + +var _ Expr = ExprRange{} + +// Bounds returns this range's bounds. These are inclusive bounds. +func (e ExprRange) Bounds() (start, end Expr) { + return e.raw.lo.With(e), e.raw.hi.With(e) +} + +// SetBounds set the expressions for this range's bounds. +// +// Clears the respective expressions when passed a nil expression. +func (e ExprRange) SetBounds(start, end Expr) { + e.raw.lo = toRawExpr(start) + e.raw.hi = toRawExpr(end) +} + +// Keyword returns the "to" keyword for this range. +func (e ExprRange) Keyword() Token { + return e.raw.to.With(e) +} + +// Span implements [Spanner] for ExprRange. +func (e ExprRange) Span() Span { + lo, hi := e.Bounds() + return JoinSpans(lo, e.Keyword(), hi) +} + +// ExprArray represents an array of expressions between square brackets. +// +// ExprArray implements [Commas[Expr]]. +type ExprArray struct { + baseExpr + withContext + + ptr arena.Untyped + raw *rawExprArray +} + +type rawExprArray struct { + brackets rawToken + args []struct { + expr rawExpr + comma rawToken + } +} + +var ( + _ Expr = ExprArray{} + _ Commas[Expr] = ExprArray{} +) + +// Brackets returns the token tree corresponding to the whole [...]. +// +// May be missing for a synthetic expression. +func (e ExprArray) Brackets() Token { + return e.raw.brackets.With(e) +} + +// Len implements [Slice] for ExprArray. +func (e ExprArray) Len() int { + return len(e.raw.args) +} + +// At implements [Slice] for ExprArray. +func (e ExprArray) At(n int) Expr { + return e.raw.args[n].expr.With(e) +} + +// Iter implements [Slice] for ExprArray. +func (e ExprArray) Iter(yield func(int, Expr) bool) { + for i, arg := range e.raw.args { + if !yield(i, arg.expr.With(e)) { + break + + } + } +} + +// Append implements [Inserter] for ExprArray. +func (e ExprArray) Append(expr Expr) { + e.InsertComma(e.Len(), expr, Token{}) +} + +// Insert implements [Inserter] for ExprArray. +func (e ExprArray) Insert(n int, expr Expr) { + e.InsertComma(n, expr, Token{}) +} + +// Delete implements [Inserter] for ExprArray. +func (e ExprArray) Delete(n int) { + e.raw.args = slices.Delete(e.raw.args, n, n+1) +} + +// Comma implements [Commas] for ExprArray. +func (e ExprArray) Comma(n int) Token { + return e.raw.args[n].comma.With(e) +} + +// AppendComma implements [Commas] for TypeGeneric. +func (e ExprArray) AppendComma(expr Expr, comma Token) { + e.InsertComma(e.Len(), expr, comma) +} + +// InsertComma implements [Commas] for TypeGeneric. +func (e ExprArray) InsertComma(n int, expr Expr, comma Token) { + e.Context().panicIfNotOurs(expr, comma) + + e.raw.args = slices.Insert(e.raw.args, n, struct { + expr rawExpr + comma rawToken + }{toRawExpr(expr), comma.raw}) +} + +// AsArray implements [Expr] for ExprArray. +func (e ExprArray) AsArray() Commas[Expr] { + return e +} + +// Span implements [Spanner] for ExprArray. +func (e ExprArray) Span() Span { + return e.Brackets().Span() +} + +// ExprDict represents a an array of message fields between curly braces. 
+type ExprDict struct { + baseExpr + withContext + + ptr arena.Untyped + raw *rawExprDict +} + +type rawExprDict struct { + braces rawToken + fields []struct { + ptr arena.Untyped + comma rawToken + } +} + +var ( + _ Expr = ExprDict{} + _ Commas[ExprKV] = ExprDict{} +) + +// Braces returns the token tree corresponding to the whole {...}. +// +// May be missing for a synthetic expression. +func (e ExprDict) Braces() Token { + return e.raw.braces.With(e) +} + +// Len implements [Slice] for ExprMessage. +func (e ExprDict) Len() int { + return len(e.raw.fields) +} + +// At implements [Slice] for ExprMessage. +func (e ExprDict) At(n int) ExprKV { + ptr := e.raw.fields[n].ptr + return ExprKV{ + baseExpr{}, + e.withContext, + ptr, + e.Context().exprs.fields.At(ptr), + } +} + +// Iter implements [Slice] for ExprMessage. +func (e ExprDict) Iter(yield func(int, ExprKV) bool) { + for i, f := range e.raw.fields { + e := ExprKV{ + baseExpr{}, + e.withContext, + f.ptr, + e.Context().exprs.fields.At(f.ptr), + } + if !yield(i, e) { + break + } + } +} + +// Append implements [Inserter] for ExprMessage. +func (e ExprDict) Append(expr ExprKV) { + e.InsertComma(e.Len(), expr, Token{}) +} + +// Insert implements [Inserter] for ExprMessage. +func (e ExprDict) Insert(n int, expr ExprKV) { + e.InsertComma(n, expr, Token{}) +} + +// Delete implements [Inserter] for ExprMessage. +func (e ExprDict) Delete(n int) { + e.raw.fields = slices.Delete(e.raw.fields, n, n+1) +} + +// Comma implements [Commas] for ExprMessage. +func (e ExprDict) Comma(n int) Token { + return e.raw.fields[n].comma.With(e) +} + +// AppendComma implements [Commas] for TypeGeneric. +func (e ExprDict) AppendComma(expr ExprKV, comma Token) { + e.InsertComma(e.Len(), expr, comma) +} + +// InsertComma implements [Commas] for TypeGeneric. +func (e ExprDict) InsertComma(n int, expr ExprKV, comma Token) { + e.Context().panicIfNotOurs(expr, comma) + if expr.Nil() { + panic("protocompile/ast: cannot append nil ExprField to ExprMessage") + } + + e.raw.fields = slices.Insert(e.raw.fields, n, struct { + ptr arena.Untyped + comma rawToken + }{expr.ptr, comma.raw}) +} + +// AsMessage implements [Expr] for ExprMessage. +func (e ExprDict) AsMessage() Commas[ExprKV] { + return e +} + +// Span implements [Spanner] for ExprMessage. +func (e ExprDict) Span() Span { + return e.Braces().Span() +} + +// ExprKV is a key-value pair within an [ExprDict]. +// +// It implements [Expr], since it can appear inside of e.g. an array if the user incorrectly writes [foo: bar]. +type ExprKV struct { + baseExpr + withContext + + ptr arena.Untyped + raw *rawExprKV +} + +type rawExprKV struct { + key, value rawExpr + colon rawToken +} + +// ExprKVArgs is arguments for [Context.NewExprKV] +type ExprKVArgs struct { + Key Expr + Colon Token + Value Expr +} + +// Key returns the key for this field. +// +// May be nil if the parser encounters a message expression with a missing field, e.g. {foo, bar: baz}. +func (e ExprKV) Key() Expr { + return e.raw.key.With(e) +} + +// SetKey sets the key for this field. +// +// If passed nil, this clears the key. +func (e ExprKV) SetKey(expr Expr) { + e.raw.key = toRawExpr(expr) +} + +// Colon returns the colon between Key() and Value(). +// +// May be nil: it is valid for a field name to be immediately followed by its value and be syntactically +// valid (unlike most "optional" punctuation, this is permitted by Protobuf, not just our permissive AST). 
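//
// For example, in `{ foo: 42 nested { bar: 1 } }` the `foo` entry has a colon
// token while the `nested` entry does not; a sketch of inspecting an entry
// (the `kv` value is assumed to be an [ExprKV]):
//
//	key := kv.Key()     // may be nil for entries like `{foo, bar: baz}`
//	colon := kv.Colon() // nil when the value directly follows the key
//	value := kv.Value()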
+func (e ExprKV) Colon() Token { + return e.raw.colon.With(e) +} + +// Value returns the value for this field. +func (e ExprKV) Value() Expr { + return e.raw.value.With(e) +} + +// SetValue sets the value for this field. +// +// If passed nil, this clears the expression. +func (e ExprKV) SetValue(expr Expr) { + e.raw.value = toRawExpr(expr) +} + +// Span implements [Spanner] for ExprField. +func (e ExprKV) Span() Span { + return JoinSpans(e.Key(), e.Colon(), e.Value()) +} + +type exprKind int8 + +// rawExpr is the raw representation of an expression. +// +// Similar to rawType (see type.go), this makes use of the fact that for rawPath, +// if the first element is negative, the other must be zero. See also rawType.With. +type rawExpr rawPath + +func toRawExpr(e Expr) rawExpr { + if e == nil { + return rawExpr{} + } + if path, ok := e.(ExprPath); ok { + return rawExpr(path.Path.raw) + } + + return rawExpr{^rawToken(e.exprKind()), rawToken(e.exprIndex())} +} + +// With extracts an expression out of a context at the given index to present to the user. +func (e rawExpr) With(c Contextual) Expr { + if e[0] == 0 && e[1] == 0 { + return nil + } + + if e[0] < 0 && e[1] != 0 { + c := c.Context() + ptr := arena.Untyped(e[1]) + switch exprKind(^e[0]) { + case exprLiteral: + return ExprLiteral{Token: rawToken(ptr).With(c)} + case exprPrefixed: + return ExprPrefixed{withContext: withContext{c}, raw: c.exprs.prefixes.At(ptr)} + case exprRange: + return ExprRange{withContext: withContext{c}, raw: c.exprs.ranges.At(ptr)} + case exprArray: + return ExprArray{withContext: withContext{c}, raw: c.exprs.arrays.At(ptr)} + case exprDict: + return ExprDict{withContext: withContext{c}, raw: c.exprs.dicts.At(ptr)} + case exprField: + return ExprKV{withContext: withContext{c}, raw: c.exprs.fields.At(ptr)} + default: + return nil + } + } + + return ExprPath{Path: rawPath(e).With(c)} +} + +// baseExpr implements most of the methods of expr, but returning default values. +// Intended for embedding. +type baseExpr struct{} + +func (baseExpr) AsBool() (bool, bool) { return false, false } +func (baseExpr) AsInt32() (int32, bool) { return 0, false } +func (baseExpr) AsInt64() (int64, bool) { return 0, false } +func (baseExpr) AsUInt32() (uint32, bool) { return 0, false } +func (baseExpr) AsUInt64() (uint64, bool) { return 0, false } +func (baseExpr) AsFloat32() (float32, bool) { return 0, false } +func (baseExpr) AsFloat64() (float64, bool) { return 0, false } +func (baseExpr) AsString() (string, bool) { return "", false } +func (baseExpr) AsArray() Commas[Expr] { return nil } +func (baseExpr) AsMessage() Commas[ExprKV] { return nil } diff --git a/experimental/ast/legalize.go b/experimental/ast/legalize.go new file mode 100644 index 00000000..24a04644 --- /dev/null +++ b/experimental/ast/legalize.go @@ -0,0 +1,435 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package ast + +import ( + "fmt" + "slices" + + "github.com/bufbuild/protocompile/experimental/report" +) + +var ( + knownSyntaxes = []string{"proto2", "proto3"} + knownEditions = []string{"2023"} +) + +// legalize generates diagnostics for anything in node that is permitted by the parser +// but not by standard Protobuf. +func legalize(r *report.Report, parent, node Spanner) { + switch node := node.(type) { + case File: + var syntax DeclSyntax + var pkg DeclPackage + imports := map[string]DeclImport{} + node.Iter(func(i int, decl Decl) bool { + switch decl := decl.(type) { + case DeclSyntax: + if i > 0 { + r.Errorf("syntax declaration must be the first declaration in a file").With( + report.Snippetf(decl, "expected this to be the first declaration"), + report.Snippetf(node.At(0), "previous declaration"), + ) + } + syntax = decl + + case DeclPackage: + if i != 0 { + if _, ok := node.At(i - 1).(DeclSyntax); !ok { + r.Errorf("package declaration can only come after a syntax declaration").With( + report.Snippetf(decl, "expected this to follow a syntax declaration"), + report.Snippetf(node.At(i-1), "previous declaration"), + ) + } + } + pkg = decl + + // Note that this causes only imports in the file level to be deduplicated; + // this is on purpose. + case DeclImport: + if str, ok := decl.ImportPath().(ExprLiteral); ok { + path, ok := str.Token.AsString() + if !ok { + break + } + + if prev, ok := imports[path]; ok { + r.Warn(ErrDuplicateImport{ + First: prev, + Second: decl, + Path: path, + }) + } else { + imports[path] = decl + } + } + } + + legalize(r, node, decl) + return true + }) + + if syntax.Nil() { + r.Warn(ErrNoSyntax{node.Context().Path()}) + } + if pkg.Nil() { + r.Warn(ErrNoPackage{node.Context().Path()}) + } + + case DeclSyntax: + if _, ok := parent.(File); !ok { + r.Error(ErrInvalidChild{parent, node}) + } + + if !node.Options().Nil() { + r.Errorf("options are not permitted on syntax declarations").With( + report.Snippetf(node.Options(), "help: remove this"), + ) + } + + // NOTE: node can only be nil if an error occurred in the parser. + if node.Value() == nil { + return + } + + what := node.Keyword().Text() + var values []string + switch { + case node.IsSyntax(): + values = knownSyntaxes + case node.IsEdition(): + values = knownEditions + } + + expectQuoted := func(value string, tok Token) bool { + if !slices.Contains(values, value) { + return false + } + + r.Errorf("missing quotes around %s value", what).With( + report.Snippetf(tok, "help: wrap this in quotes"), + ) + return true + } + + switch expr := node.Value().(type) { + case ExprLiteral: + if expr.Token.Kind() == TokenString { + // If ok is false, we assume this has already been diagnosed in the + // lexer, because TokenString -> this is a string. + value, ok := expr.Token.AsString() + if ok && !slices.Contains(values, value) { + r.Error(ErrUnknownSyntax{Node: node, Value: expr.Token}) + } else { + legalizePureString(r, "`"+what+"` value", expr.Token) + } + return + } else if expectQuoted(expr.Token.Text(), expr.Token) { + // This might be an unquoted edition. + return + } + case ExprPath: + // Single identifier means the user forgot the quotes around proto3 + // or such. 
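// For example, given the illustrative input
//
//	syntax = proto3;
//
// this branch fires and the diagnostic suggests wrapping the value in quotes,
// i.e. `syntax = "proto3";`.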
+ if name := expr.Path.AsIdent(); !name.Nil() && expectQuoted(name.Name(), name) { + return + } + } + + r.Error(errUnexpected{ + node: node.Value(), + where: "in " + describe(node), + want: []string{"string literal"}, + }) + + case DeclPackage: + if _, ok := parent.(File); !ok { + r.Error(ErrInvalidChild{parent, node}) + } + + if !node.Options().Nil() { + r.Errorf("options are not permitted on syntax declarations").With( + report.Snippetf(node.Options(), "help: remove this"), + ) + } + + if node.Path().Nil() { + r.Errorf("missing package name").With( + report.Snippetf(node, "help: add a path after `package`"), + ) + return + } + + var idx int + node.Path().Components(func(pc PathComponent) bool { + if pc.Separator().Text() == "/" { + r.Errorf("package names cannot contain slashes").With( + report.Snippet(pc.Separator()), + ) + return false + } + + if idx == 0 && !pc.Separator().Nil() { + r.Errorf("package names cannot be absolute paths").With( + report.Snippetf(pc.Separator(), "help: remove this dot"), + ) + return false + } + + if pc.IsExtension() { + r.Errorf("package names cannot contain extension names").With( + report.Snippet(pc.Name()), + ) + return false + } + + idx++ + return true + }) + + case DeclImport: + if _, ok := parent.(File); !ok { + r.Error(ErrInvalidChild{parent, node}) + } + + if node.IsWeak() { + r.Warnf("weak imports are discouraged and broken in some runtimes").With( + report.Snippet(node.Modifier()), + ) + } + + if !node.Options().Nil() { + r.Errorf("options are not permitted on syntax declarations").With( + report.Snippetf(node.Options(), "help: remove this"), + ) + } + + switch expr := node.ImportPath().(type) { + case nil: + // Select a token to place the suggestion after. + importAfter := node.Modifier() + if importAfter.Nil() { + importAfter = node.Keyword() + } + + r.Errorf("import is missing a file path").With( + report.Snippetf(importAfter, "help: insert the name of the file to import after this keyword"), + ) + return + case ExprLiteral: + if expr.Token.Kind() == TokenString { + legalizePureString(r, "import path", expr.Token) + } + case ExprPath: + r.Errorf("cannot import by Protobuf symbol").With( + report.Snippetf(expr, "expected a quoted filesystem path"), + ) + return + } + + r.Error(errUnexpected{ + node: node.ImportPath(), + where: "in " + describe(node), + want: []string{"string literal"}, + }) + + case DeclDef: + switch parent := parent.(type) { + case File: + case DeclDef: + if _, ok := parent.Classify().(DefMessage); !ok { + r.Error(ErrInvalidChild{parent, node}) + } + default: + r.Error(ErrInvalidChild{parent, node}) + } + + // This part of legalization "essentially" re-implements Classify, but + // generates diagnostics instead of failing. 
+ kw := node.Keyword() + switch kwText := kw.Text(); kwText { + case "message", "enum", "service", "extends", "oneof": + if kwText != "extends" && node.Name().AsIdent().Nil() { + r.Error(errUnexpected{ + node: node.Name(), + where: fmt.Sprintf("in %s name", kwText), + want: []string{"identifier"}, + }) + } + + if sig := node.Signature(); !sig.Nil() { + r.Error(errUnexpected{ + node: sig, + where: fmt.Sprintf("in %s definition", kwText), + }) + } + if value := node.Value(); value != nil { + r.Error(errUnexpected{ + node: value, + where: fmt.Sprintf("in %s definition", kwText), + }) + } else if eq := node.Equals(); !eq.Nil() { + r.Error(errUnexpected{ + node: value, + where: fmt.Sprintf("in %s definition", kwText), + }) + } + if options := node.Options(); !options.Nil() { + r.Errorf("compact options are not permitted on %s definitions", kwText).With( + report.Snippetf(node.Options(), "help: remove this"), + ) + } + + // Parent must be file or message, unless it's a service, + // in which case it must be file, or unless it's a oneof, in which + // case it must be message. + switch parent := parent.(type) { + case File: + if kwText == "oneof" { + r.Error(ErrInvalidChild{parent, node}) + } + case DeclDef: + if kwText == "service" || parent.Keyword().Text() != "message" { + r.Error(ErrInvalidChild{parent, node}) + } + default: + r.Error(ErrInvalidChild{parent, node}) + } + case "group": + if node.Name().AsIdent().Nil() { + r.Error(errUnexpected{ + node: node.Name(), + where: "in group name", + want: []string{"identifier"}, + }) + } + + if sig := node.Signature(); !sig.Nil() { + r.Error(errUnexpected{ + node: sig, + where: fmt.Sprintf("in %s definition", kwText), + }) + } + + if value := node.Value(); value == nil { + var numberAfter Spanner + if name := node.Name(); !name.Nil() { + + } + + numberAfter = node.Name() + if numberAfter.Nil() { + numberAfter = kw + } + + // TODO: This should be moved to somewhere where we can suggest + // the next unallocated value as the field number. + r.Errorf("missing field number").With( + report.Snippetf(node.Options(), "help: remove this"), + ) + } + } + + node.Body().Iter(func(_ int, decl Decl) bool { + legalize(r, node, decl) + return true + }) + + case DeclRange: + parent, ok := parent.(DeclDef) + if !ok { + r.Error(ErrInvalidChild{parent, node}) + return + } + def := parent.Classify() + switch def.(type) { + case DefMessage, DefEnum: + default: + r.Error(ErrInvalidChild{parent, node}) + return + } + + if node.IsReserved() && !node.Options().Nil() { + r.Errorf("options are not permitted on reserved ranges").With( + report.Snippetf(node.Options(), "help: remove this"), + ) + } + + // TODO: Most of this should probably get hoisted to wherever it is that we do + // type checking once that exists. + node.Iter(func(_ int, expr Expr) bool { + switch expr := expr.(type) { + case ExprRange: + ensureInt32 := func(expr Expr) { + _, ok := expr.AsInt32() + if !ok { + r.Errorf("mismatched types").With( + report.Snippetf(expr, "expected `int32`"), + report.Snippetf(node.Keyword(), "expected due to this"), + ) + } + } + + start, end := expr.Bounds() + ensureInt32(start) + if path, ok := end.(ExprPath); ok && path.AsIdent().Name() == "max" { + // End is allowed to be "max". + } else { + ensureInt32(end) + } + return true + case ExprPath: + if node.IsReserved() && !expr.Path.AsIdent().Nil() { + return true + } + // TODO: diagnose against a lone "max" ExprPath in an extension + // range. 
+ case ExprLiteral: + if node.IsReserved() && expr.Token.Kind() == TokenString { + if text, ok := expr.Token.AsString(); ok && !isASCIIIdent(text) { + r.Error(ErrNonASCIIIdent{Token: expr.Token}) + } else { + legalizePureString(r, "reserved field name", expr.Token) + } + return true + } + } + + _, ok := expr.AsInt32() + if !ok { + allowedTypes := "expected `int32` or `int32` range" + if node.IsReserved() { + allowedTypes = "expected `int32`, `int32` range, `string`, or identifier" + } + r.Errorf("mismatched types").With( + report.Snippetf(expr, allowedTypes), + report.Snippetf(node.Keyword(), "expected due to this"), + ) + } + + return true + }) + } +} + +func legalizePureString(r *report.Report, what string, tok Token) { + if value, ok := tok.AsString(); ok { + if !tok.IsPureString() { + r.Warnf("%s should be a single, escape-less string", what).With( + report.Snippetf(tok, `help: change this to %q`, value), + ) + } + } +} diff --git a/experimental/ast/lexer.go b/experimental/ast/lexer.go new file mode 100644 index 00000000..b2d498a0 --- /dev/null +++ b/experimental/ast/lexer.go @@ -0,0 +1,590 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "github.com/bufbuild/protocompile/experimental/report" +) + +// lexer is a Protobuf lexer. +type lexer struct { + *Context + cursor int + openStack []Token +} + +// Lex performs lexical analysis, and places any diagnostics in report. +func (l *lexer) Lex(errs *report.Report) { + // Check that the file isn't too big. We give up immediately if that's + // the case. + if len(l.Text()) > MaxFileSize { + errs.Error(ErrFileTooBig{Path: l.Path()}) + return + } + + // Also check that the text of the file is actually UTF-8. + // We go rune by rune to find the first invalid offset. + for text := l.file.File().Text; text != ""; { + r := decodeRune(text) + if r == -1 { + errs.Error(ErrNotUTF8{ + Path: l.Path(), + At: len(l.Text()) - len(text), + Byte: text[0], + }) + return + } + text = text[utf8.RuneLen(r):] + } + + var tokens int + for !l.Done() { + start := l.cursor + r := l.Pop() + + prevTokens := tokens + if prevTokens > 0 && prevTokens == len(l.stream) { + panic(fmt.Sprintf("protocompile/ast: lexer failed to make progress at offset %d; this is a bug in protocompile", l.cursor)) + } + tokens = len(l.stream) + + switch { + case unicode.IsSpace(r): + // Whitepace. Consume as much whitespace as possible and mint a + // whitespace token. + l.TakeWhile(unicode.IsSpace) + l.PushToken(l.cursor-start, TokenSpace) + + case r == '/' && l.Peek() == '/': + l.cursor++ // Skip the second /. + + // Single-line comment. Seek to the next '\n' or the EOF. + var text string + if comment, ok := l.SeekInclusive("\n"); ok { + text = comment + } else { + text = l.SeekEOF() + } + l.PushToken(len("//")+len(text), TokenComment) + case r == '/' && l.Peek() == '*': + l.cursor++ // Skip the *. + + // Block comment. Seek to the next "*/". 
Protobuf comments + // unfortunately do not nest, and allowing them to nest can't + // be done in a backwards-compatible manner. We acknowledge that + // this behavior is user-hostile. + // + // If we encounter no "*/", seek EOF and emit a diagnostic. Trying + // to lex a partial comment is hopeless. + + var text string + if comment, ok := l.SeekInclusive("*/"); ok { + text = comment + } else { + // Create a span for the /*, that's what we're gonna highlight. + errs.Error(ErrUnterminated{Span: l.NewSpan(l.cursor-2, l.cursor)}) + text = l.SeekEOF() + } + l.PushToken(len("/*")+len(text), TokenComment) + case r == '*' && l.Peek() == '/': + // The user definitely thought nested comments were allowed. :/ + tok := l.PushToken(len("*/"), TokenUnrecognized) + errs.Error(ErrUnterminated{Span: tok.Span()}) + + case strings.ContainsRune(";,/:=-", r): // . is handled elsewhere. + // Random punctuation that doesn't require special handling. + l.PushToken(utf8.RuneLen(r), TokenPunct) + + case strings.ContainsRune("([{<", r): // Push the opener, close it later. + token := l.PushToken(utf8.RuneLen(r), TokenPunct) + l.openStack = append(l.openStack, token) + case strings.ContainsRune(")]}>", r): + token := l.PushToken(utf8.RuneLen(r), TokenPunct) + if len(l.openStack) == 0 { + errs.Error(ErrUnterminated{Span: token.Span()}) + } else { + end := len(l.openStack) - 1 + var expected string + switch l.openStack[end].Text() { + case "(": + expected = ")" + case "[": + expected = "]" + case "{": + expected = "}" + case "<": + expected = ">" + } + if token.Text() != expected { + errs.Error(ErrUnterminated{Span: l.openStack[end].Span(), Mismatch: token.Span()}) + } + + l.FuseTokens(l.openStack[end], token) + l.openStack = l.openStack[:end] + } + + case r == '"', r == '\'': + l.cursor-- // Back up to behind the quote before resuming. + l.LexString(errs) + + case r == '.': + // A . is normally a single token, unless followed by a digit, which makes it + // part of a number. + if r := l.Peek(); !unicode.IsDigit(r) { + l.PushToken(1, TokenPunct) + continue + } + fallthrough + case unicode.IsDigit(r): + // Back up behind the rune we just popped. + l.cursor -= utf8.RuneLen(r) + l.LexNumber(errs) + + case r == '_' || unicode.IsLetter(r): // Consume fairly-open-ended identifiers, legalize to ASCII later. + l.TakeWhile(func(r rune) bool { + return r == '_' || unicode.IsDigit(r) || unicode.IsLetter(r) + }) + token := l.PushToken(l.cursor-start, TokenIdent) + + // Legalize non-ASCII runes. + if !isASCIIIdent(token.Text()) { + errs.Error(ErrNonASCIIIdent{Token: token}) + } + + default: // Consume as much stuff we don't understand as possible, diagnose it. + l.TakeWhile(func(r rune) bool { + return !strings.ContainsRune(";,/:=-.([{<>}])_\"'", r) && + !unicode.IsSpace(r) && !unicode.IsDigit(r) && !unicode.IsLetter(r) + }) + token := l.PushToken(l.cursor-start, TokenUnrecognized) + errs.Error(ErrUnrecognized{Token: token}) + } + } + + // Legalize against unclosed delimiters. + for _, open := range l.openStack { + errs.Error(ErrUnterminated{Span: open.Span()}) + } + // In backwards order, generate empty tokens to fuse with + // the unclosed delimiters. + for i := len(l.openStack) - 1; i >= 0; i-- { + empty := l.PushToken(0, TokenUnrecognized) + l.FuseTokens(l.openStack[i], empty) + } + + // Perform implicit string concatenation.
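// For example, the input `"foo" "bar" "baz"`, possibly with comments or
// whitespace in between, lexes as three string tokens that are then fused
// into a single token whose materialized value is "foobarbaz"; a sketch of
// what a caller observes afterwards (the `tok` value is assumed):
//
//	s, ok := tok.AsString() // "foobarbaz", true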
+ catStrings := func(start, end Token) { + var buf strings.Builder + for i := start.raw; i <= end.raw; i++ { + tok := i.With(l.Context) + if s, ok := tok.AsString(); ok { + buf.WriteString(s) + delete(l.literals, tok.raw) + } + } + l.literals[start.raw] = buf.String() + l.FuseTokens(start, end) + } + var start, end Token + for i := range l.stream { + tok := rawToken(i + 1).With(l.Context) + switch tok.Kind() { + case TokenSpace, TokenComment: + continue + case TokenString: + if start.Nil() { + start = tok + } else { + end = tok + } + default: + if !start.Nil() && !end.Nil() { + catStrings(start, end) + } + start = Token{} + end = Token{} + } + } + if !start.Nil() && !end.Nil() { + catStrings(start, end) + } +} + +// LexString lexes a number starting at the current cursor. +func (l *lexer) LexNumber(errs *report.Report) Token { + start := l.cursor + // Accept all digits, legalize later. + // Consume the largest prefix that satisfies the rules at + // https://protobuf.com/docs/language-spec#numeric-literals +more: + r := l.Peek() + if r == 'e' || r == 'E' { + _ = l.Pop() + r = l.Peek() + if r == '+' || r == '-' { + _ = l.Pop() + } + + goto more + } + if r == '.' || unicode.IsDigit(r) || unicode.IsLetter(r) || + // We consume _ because 0_n is not valid in any context, so we + // can offer _ digit separators as an extension. + r == '_' { + _ = l.Pop() + goto more + } + + // Create the token, even if this is an invalid number. This will help + // the parser pick up bad numbers into number literals. + digits := l.Text()[start:l.cursor] + token := l.PushToken(len(digits), TokenNumber) + + // Delete all _s from digits and normalize to lowercase. + digits = strings.ToLower(strings.ReplaceAll(digits, "_", "")) + + // Now, let's see if this is actually a valid number that needs a diagnostic. + // First, try to reify it as a uint64. + switch { + case strings.HasPrefix(digits, "0x"): + value, err := strconv.ParseUint(strings.TrimPrefix(digits, "0x"), 16, 64) + if err == nil { + l.literals[token.raw] = value + return token + } + + // Emit a diagnostic. Which diagnostic we emit depends on the error Go + // gave us. NB: all ParseUint errors are *strconv.NumError, as promised + // by the documentation. + if err.(*strconv.NumError).Err == strconv.ErrRange { + errs.Error(ErrIntegerOverflow{Token: token}) + } else { + errs.Error(ErrInvalidNumber{Token: token}) + + } + case strings.HasPrefix(digits, "0") && strings.IndexFunc(digits, func(r rune) bool { return r < '0' || r > '7' }) == -1: + // Annoyingly, 0777 is octal, but 0888 is not, so we have to handle this case specially. + fallthrough + case strings.HasPrefix(digits, "0o"): // Rust/Python-style octal ints are an extension. + value, err := strconv.ParseUint(strings.TrimPrefix(digits, "0o"), 8, 64) + if err == nil { + l.literals[token.raw] = value + return token + } + + if err.(*strconv.NumError).Err == strconv.ErrRange { + errs.Error(ErrIntegerOverflow{Token: token}) + } else { + errs.Error(ErrInvalidNumber{Token: token}) + + } + case strings.HasPrefix(digits, "0b"): // Binary ints are an extension. + value, err := strconv.ParseUint(strings.TrimPrefix(digits, "0b"), 2, 64) + if err == nil { + l.literals[token.raw] = value + return token + } + + if err.(*strconv.NumError).Err == strconv.ErrRange { + errs.Error(ErrIntegerOverflow{Token: token}) + } else { + errs.Error(ErrInvalidNumber{Token: token}) + + } + } + + // This is either a float or a decimal number. Try parsing as decimal first, otherwise + // try parsing as float. 
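// A few illustrative inputs for this tail end of the function (hypothetical
// examples, not test vectors):
//
//	"123"    // valid decimal: the value is recomputed on demand, not stored
//	"1.5"    // valid float: stored in l.literals as float64(1.5)
//	"1e400"  // overflows float64: stored as +Inf, as described below
//	"1.2.3"  // fails both parses: diagnosed as an invalid number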
+ _, err := strconv.ParseUint(digits, 10, 64) + if err == nil { + // This is the most common result. It gets computed ON DEMAND by Token.AsNumber. + // DO NOT place it into l.literals. + return token + } + + // Check if it's out of range, because if that's the only problem, it's + // guaranteed to parse as a possibly-infinite float. + if err.(*strconv.NumError).Err == strconv.ErrRange { + // We want this to overflow to Infinity as needed, which ParseFloat + // will do for us. Otherwise it will ties-to-even as expected. Currently, + // the spec does not say "ties-to-even", but it says "nearest value", + // which everyone says when they mean "ties-to-even" whether they know it + // or not. + // + // ParseFloat itself says it "returns the nearest floating-point number + // rounded using IEEE754 unbiased rounding", which is just a weird way to + // say "ties-to-even". + value, _ := strconv.ParseFloat(digits, 64) + l.literals[token.raw] = value + return token + } + + // If it was not well-formed, it might be a float. + value, err := strconv.ParseFloat(digits, 64) + + // This time, the syntax might be invalid. If it is, we decide whether + // this is a bad float or a bad integer based on whether it contains + // periods. + if err != nil && err.(*strconv.NumError).Err == strconv.ErrSyntax { + errs.Error(ErrInvalidNumber{Token: token}) + } + + // If the error is ErrRange we don't care, it will clamp to Infinity in + // the way we want, as noted by the comment above. + l.literals[token.raw] = value + return token +} + +// LexString lexes a string starting at the current cursor. +// +// The cursor position should be just before the string's first quote character. +func (l *lexer) LexString(errs *report.Report) Token { + start := l.cursor + q := l.Pop() + + // Seek to the end of the string, unescaping as we go. We do not + // materialize an unescaped string if this string does not require escaping. + var buf strings.Builder + var haveEsc bool +escapeLoop: + for !l.Done() { + r := l.Pop() + if r == q { + break + } + + // Warn if the user has a non-printable character in their string that isn't + // ASCII whitespace. + if !unicode.IsGraphic(r) && !strings.ContainsRune(" \n\t\r", r) { + errs.Warnf("non-printable character in string literal").With( + report.Snippetf(l.NewSpan(l.cursor-utf8.RuneLen(r), l.cursor), "this is the rune U+%04x", r), + ) + } + + if r != '\\' { + // We intentionally do not legalize against literal \0 and \n. The above warning + // covers \0 and legalizing against \n is user-hostile. This is valuable for + // e.g. strings that contain CEL code. + // + // In other words, this limitation helps no one, so we ignore it. + if haveEsc { + buf.WriteRune(r) + } + continue + } + + if !haveEsc { + buf.WriteString(l.Text()[start+1 : l.cursor-1]) + haveEsc = true + } + + r = l.Pop() + switch r { + // These are all the simple escapes. + case 'a': + buf.WriteByte('\a') // U+0007 + case 'b': + buf.WriteByte('\b') // U+0008 + case 'f': + buf.WriteByte('\f') // U+000C + case 'n': + buf.WriteByte('\n') + case 'r': + buf.WriteByte('\r') + case 'v': + buf.WriteByte('\v') // U+000B + case '\\', '\'', '"', '?': + buf.WriteRune(r) + + // Octal escape. Need to eat the next two runes if they're octal. + case '0', '1', '2', '3', '4', '5', '6', '7': + value := byte(r) - '0' + for i := 0; i < 2; i++ { + if l.Done() { + break escapeLoop + } + r = l.Peek() + + // Check before consuming the rune. If we see e.g. + // an 8, we don't want to consume it.
+ if r < '0' || r > '7' { + break + } + _ = l.Pop() + + value *= 8 + value |= byte(r) - '0' + } + buf.WriteByte(value) + + // Hex escapes. + case 'x', 'u', 'U': + var value uint32 + var digits, consumed int + switch r { + case 'x': + digits = 2 + case 'u': + digits = 4 + case 'U': + digits = 8 + } + + digits: + for i := 0; i < digits; i++ { + if l.Done() { + break escapeLoop + } + r = l.Peek() + + value *= 16 + switch { + case r >= '0' && r <= '9': + value |= uint32(r) - '0' + case r >= 'a' && r <= 'f': + value |= uint32(r) - 'a' + 10 + case r >= 'A' && r <= 'F': + value |= uint32(r) - 'A' + 10 + default: + break digits + } + _ = l.Pop() + + consumed++ + } + + escape := l.NewSpan(start, l.cursor) + if consumed == 0 { + errs.Error(ErrInvalidEscape{Span: escape}) + } else if r != 'x' { + if consumed != digits || !utf8.ValidRune(rune(value)) { + errs.Error(ErrInvalidEscape{Span: escape}) + } + } + + if r == 'x' { + buf.WriteByte(byte(value)) + } else { + buf.WriteRune(rune(value)) + } + default: + escape := l.NewSpan(start, l.cursor) + errs.Error(ErrInvalidEscape{Span: escape}) + } + } + + token := l.PushToken(l.cursor-start, TokenString) + if haveEsc { + l.literals[token.raw] = buf.String() + } + + quoted := token.Text() + if quoted[0] != quoted[len(quoted)-1] { + errs.Error(ErrUnterminatedStringLiteral{Token: token}) + } + + return token +} + +// Done returns whether or not we're done lexing runes. +func (l *lexer) Done() bool { + return l.Rest() == "" +} + +// Rest returns unlexed text. +func (l *lexer) Rest() string { + return l.Text()[l.cursor:] +} + +// Peek peeks the next character; returns that character and its length. +// +// Returns -1 if l.Done(). +func (l *lexer) Peek() rune { + return decodeRune(l.Rest()) +} + +// Pop consumes the next character; returns that character and its length. +// +// Returns -1 if l.Done(). +func (l *lexer) Pop() rune { + r := l.Peek() + if r != -1 { + l.cursor += utf8.RuneLen(r) + return r + } + return -1 +} + +// TakeWhile consumes the characters while they match the given function. +// Returns consumed characters. +func (l *lexer) TakeWhile(f func(rune) bool) string { + start := l.cursor + for !l.Done() { + r := l.Peek() + if r == -1 || !f(r) { + break + } + _ = l.Pop() + } + return l.Text()[start:l.cursor] +} + +// SeekInclusive seek until the given needle is found; returns the prefix inclusive that +// needle, and updates the cursor to point after it. +func (l *lexer) SeekInclusive(needle string) (string, bool) { + if idx := strings.Index(l.Rest(), needle); idx != -1 { + prefix := l.Rest()[:idx+len(needle)] + l.cursor += idx + len(needle) + return prefix, true + } + return "", false +} + +// SeekEOF seeks the cursor to the end of the file and returns the remaining text. +func (l *lexer) SeekEOF() string { + rest := l.Rest() + l.cursor += len(rest) + return rest +} + +// decodeRune is a wrapper around utf8.DecodeRuneInString that makes it easier +// to check for failure. Instead of returning RuneError (which is a valid rune!), +// it returns -1. +// +// The success conditions for DecodeRune are kind of subtle; this makes +// sure we get the logic right every time. It is somewhat annoying that +// Go did not chose to make this easier to inspect. 
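//
// A sketch of the resulting behavior (assumed inputs):
//
//	decodeRune("a")      // 'a'
//	decodeRune("é")      // 'é'; multi-byte runes decode normally
//	decodeRune("")       // -1; nothing to decode
//	decodeRune("\xff")   // -1; invalid UTF-8
//	decodeRune("\uFFFD") // U+FFFD; a literal replacement character is valid input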
+func decodeRune(s string) rune { + r, n := utf8.DecodeRuneInString(s) + if r == utf8.RuneError && n < 2 { + return -1 + } + return r +} + +func isASCIIIdent(s string) bool { + for _, r := range s { + if !((r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '_') { + return false + } + } + return true +} diff --git a/experimental/ast/options.go b/experimental/ast/options.go new file mode 100644 index 00000000..1828d080 --- /dev/null +++ b/experimental/ast/options.go @@ -0,0 +1,147 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "github.com/bufbuild/protocompile/internal/arena" + "golang.org/x/exp/slices" +) + +// CompactOptions represents the collection of options attached to a field-like declaration, +// contained within square brackets. +// +// CompactOptions implements [Commas] over its options. +type CompactOptions struct { + withContext + + ptr arena.Pointer[rawCompactOptions] + raw *rawCompactOptions +} + +type rawCompactOptions struct { + brackets rawToken + options []struct { + option rawOption + comma rawToken + } +} + +var _ Commas[Option] = CompactOptions{} + +// Option is a key-value pair inside of a [CompactOptions] or a [DefOption]. +type Option struct { + Path Path + Equals Token + Value Expr +} + +type rawOption struct { + path rawPath + equals rawToken + value rawExpr +} + +// Brackets returns the token tree corresponding to the whole [...]. +func (o CompactOptions) Brackets() Token { + return o.raw.brackets.With(o) +} + +// Len implements [Slice] for Options. +func (o CompactOptions) Len() int { + return len(o.raw.options) +} + +// At implements [Slice] for Options. +func (o CompactOptions) At(n int) Option { + return o.raw.options[n].option.With(o) +} + +// Iter implements [Slice] for Options. +func (o CompactOptions) Iter(yield func(int, Option) bool) { + for i, arg := range o.raw.options { + if !yield(i, arg.option.With(o)) { + break + } + } +} + +// Append implements [Inserter] for Options. +func (o CompactOptions) Append(option Option) { + o.InsertComma(o.Len(), option, Token{}) +} + +// Insert implements [Inserter] for Options. +func (o CompactOptions) Insert(n int, option Option) { + o.InsertComma(n, option, Token{}) +} + +// Delete implements [Inserter] for Options. +func (o CompactOptions) Delete(n int) { + o.raw.options = slices.Delete(o.raw.options, n, n+1) +} + +// Comma implements [Commas] for Options. +func (o CompactOptions) Comma(n int) Token { + return o.raw.options[n].comma.With(o) +} + +// AppendComma implements [Commas] for Options. +func (o CompactOptions) AppendComma(option Option, comma Token) { + o.InsertComma(o.Len(), option, comma) +} + +// InsertComma implements [Commas] for Options. 
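+//
+// A rough usage sketch (illustrative only; assumes opts is an existing
+// CompactOptions and that path, eq, expr, and comma were all built against the
+// same Context):
+//
+//	opts.InsertComma(0, Option{Path: path, Equals: eq, Value: expr}, comma)
+//
+// This places the new option first and records the comma token that follows it.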
+func (o CompactOptions) InsertComma(n int, option Option, comma Token) { + o.Context().panicIfNotOurs(option.Path, option.Equals, option.Value, comma) + + o.raw.options = slices.Insert(o.raw.options, n, struct { + option rawOption + comma rawToken + }{ + rawOption{ + path: option.Path.raw, + equals: option.Equals.raw, + value: toRawExpr(option.Value), + }, + comma.raw, + }) +} + +// Span implements [Spanner] for Options. +func (o CompactOptions) Span() Span { + return JoinSpans(o.Brackets()) +} + +func newOptions(ptr arena.Pointer[rawCompactOptions], c Contextual) CompactOptions { + if ptr.Nil() { + return CompactOptions{} + } + return CompactOptions{ + withContext{c.Context()}, + ptr, + ptr.In(&c.Context().options), + } +} + +func (o *rawOption) With(c Contextual) Option { + if o == nil { + return Option{} + } + return Option{ + Path: o.path.With(c), + Equals: o.equals.With(c), + Value: o.value.With(c), + } +} diff --git a/experimental/ast/parser.go b/experimental/ast/parser.go new file mode 100644 index 00000000..df5df420 --- /dev/null +++ b/experimental/ast/parser.go @@ -0,0 +1,957 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "github.com/bufbuild/protocompile/experimental/report" +) + +// parse implements the core parser loop. +func parse(errs *report.Report, c *Context) { + cursor := c.Stream() + file := c.Root() + var mark CursorMark + for !cursor.Done() { + cursor.ensureProgress(mark) + mark = cursor.Mark() + + next := parseDecl(errs, cursor, "") + if next != nil { + file.Append(next) + } + } +} + +// parseDecl parses any Protobuf declaration. +// +// This function will always advance cursor if it is not empty. +func parseDecl(errs *report.Report, cursor *Cursor, where string) Decl { + first := cursor.Peek() + if first.Nil() { + return nil + } + + if first.Text() == ";" { + cursor.Pop() + + // This is an empty decl. + return cursor.Context().NewDeclEmpty(first) + } + + if first.Text() == "{" { + cursor.Pop() + return parseBody(errs, first, first.Children(), where) + } + + // We need to parse a path here. At this point, we need to generate a + // diagnostic if there is anything else in our way before hitting parsePath. + if !canStartPath(first) { + // Consume the token, emit a diagnostic, and throw it away. + cursor.Pop() + var whereStr string + if where == "" { + whereStr = "in file scope" + } else { + whereStr = "in `" + where + "`" + } + + errs.Error(errUnexpected{ + node: first, + where: whereStr, + want: []string{"identifier", "`.`", "`;`", "`(...)`", "`{...}`"}, + }) + return nil + } + + // Parse a type followed by a path. This is the "most general" prefix of almost all + // possible productions in a decl. If the type is a TypePath which happens to be + // a keyword, we try to parse the appropriate thing (with one token of lookahead), + // and otherwise parse a field. 
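+	//
+	// For example, `message Foo {` parses as the type `message` followed by the path
+	// `Foo`; the `{` that follows is what makes it a message body rather than a
+	// field. (Illustrative example.)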
+ ty, path := parseType(errs, cursor) + + var kw Token + if path, ok := ty.(TypePath); ok { + kw = path.AsIdent() + } + + type exprComma struct { + expr Expr + comma Token + } + + // Check for the various special cases. + next := cursor.Peek() + switch kw.Text() { + case "syntax", "edition": + if where == "enum" || !path.Nil() { + // Inside of an enum, fields without types are valid, and that is ambiguous with + // a syntax node. + break + } + + args := DeclSyntaxArgs{ + Keyword: kw, + } + + args.Equals = parsePunct(errs, cursor, punctArgs{ + want: "=", + where: "in " + kw.Text() + " declaration", + }) + + // Regardless of if we see an = sign, try to parse an expression if we + // can, + if !args.Equals.Nil() || canStartExpr(cursor.Peek()) { + // If there is a trailing ; instead of an expression, make sure we can include + // it in the span for the the decl. If we don't do this, parseExpr will throw it + // away. + if semi := cursor.Peek(); semi.Text() == ";" { + args.Semicolon = cursor.Pop() + } else { + args.Value = parseExpr(errs, cursor) + } + } + + // Only diagnose a missing semicolon if we successfully parsed some + // kind of partially-valid expression. Otherwise, we might diagnose + // the same extraneous ; twice. + if args.Semicolon.Nil() { + if next := cursor.Peek(); next.Text() == "[" { + args.Options = cursor.Context().NewOptions(cursor.Pop()) + parseOptions(errs, args.Options.Brackets(), args.Options) + } + + args.Semicolon = parsePunct(errs, cursor, punctArgs{ + want: ";", + where: "in " + kw.Text() + " declaration", + diagnoseUnless: args.Equals.Nil() || args.Value == nil, + }) + } + + return cursor.Context().NewDeclSyntax(args) + + case "package": + if where != "" && next.Text() == "=" { + // If it's followed by an = sign and not at the top level, treat + // it as a field. + break + } + + cursor.Pop() + return cursor.Context().NewDeclPackage(DeclPackageArgs{ + Keyword: kw, + Path: path, + Semicolon: next, + }) + + case "import": + if where != "" && next.Text() == "=" { + // If it's followed by an = sign and not at the top level, treat + // it as a field. + break + } + + args := DeclImportArgs{ + Keyword: kw, + } + + modifier := path.AsIdent().Name() + if modifier == "public" || modifier == "weak" { + args.Modifier = path.AsIdent() + } else if !path.Nil() { + // This will catch someone writing `import foo.bar;` when we legalize. + args.ImportPath = ExprPath{Path: path} + } + + if args.ImportPath == nil && canStartExpr(next) { + args.ImportPath = parseExpr(errs, cursor) + } + + if next := cursor.Peek(); next.Text() == "[" { + args.Options = cursor.Context().NewOptions(cursor.Pop()) + parseOptions(errs, args.Options.Brackets(), args.Options) + } + + // Check for a trailing semicolon. + args.Semicolon = parsePunct(errs, cursor, punctArgs{ + want: ";", + diagnoseUnless: args.ImportPath == nil, + where: "in import", + }) + + return cursor.Context().NewDeclImport(args) + + case "reserved", "extensions": + if next.Text() == "=" { + // If whatever follows the path is an =, we're going to assume this is + // meant to be a field. + break + } + + var ( + done, bad bool + exprs []exprComma + ) + + // Convert the trailing path, if there is any, into an expression, and check for the + // first comma. 
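+		// For example, in `reserved foo, bar;` the identifier `foo` was already
+		// consumed above as the trailing path, so it becomes the first expression in
+		// the range. (Illustrative example.)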
+ if !path.Nil() { + expr := parseOperator(errs, cursor, ExprPath{Path: path}, 0) + var comma Token + if next := cursor.Peek(); next.Text() == "," { + comma = cursor.Pop() + } else { + done = true + } + exprs = append(exprs, exprComma{expr, comma}) + bad = bad || expr == nil + } + + if !done { + // Parse expressions until we hit a semicolon or the [ of compact options. + delimited := commaDelimited(true, errs, cursor, func(cursor *Cursor) (Expr, bool) { + next := cursor.Peek().Text() + if next == ";" || next == "[" { + return nil, false + } + + expr := parseExpr(errs, cursor) + bad = bad || expr == nil + + return expr, expr != nil + }) + + delimited(func(expr Expr, comma Token) bool { + exprs = append(exprs, exprComma{expr, comma}) + return true + }) + } + + var options CompactOptions + if next := cursor.Peek(); next.Text() == "[" { + options = cursor.Context().NewOptions(cursor.Pop()) + parseOptions(errs, options.Brackets(), options) + } + + // Parse a semicolon, if possible. + semi := parsePunct(errs, cursor, punctArgs{ + want: ";", + where: "in `" + kw.Text() + "` range", + diagnoseUnless: bad, + }) + + range_ := cursor.Context().NewDeclRange(DeclRangeArgs{ + Keyword: kw, + Options: options, + Semicolon: semi, + }) + for _, e := range exprs { + range_.AppendComma(e.expr, e.comma) + } + + return range_ + } + + args := DeclDefArgs{ + Type: ty, + Name: path, + } + + var inputs, outputs, braces, brackets Token + + // Try to parse the various "followers". + var mark CursorMark + for !cursor.Done() { + cursor.ensureProgress(mark) + mark = cursor.Mark() + + // Note that the inputs and outputs of a method are parsed + // separately, so foo(bar) and foo returns (bar) are both possible. + next := cursor.Peek() + if next.Text() == "(" { + cursor.Pop() + if !inputs.Nil() { + errs.Error(ErrMoreThanOne{ + First: inputs, + Second: next, + what: "method input parameter list", + }) + } else { + inputs = next + } + continue + } + if next.Text() == "returns" { + args.Returns = cursor.Pop() + next := parsePunct(errs, cursor, punctArgs{ + want: "(...)", + where: "after `returns`", + }) + + if !outputs.Nil() && !next.Nil() { + errs.Error(ErrMoreThanOne{ + First: outputs, + Second: next, + what: "method input parameter list", + }) + } else { + outputs = next + } + continue + } + + if next.Text() == "[" { + cursor.Pop() + if !args.Options.Nil() { + errs.Error(ErrMoreThanOne{ + First: args.Options, + Second: next, + what: "compact options list", + }) + } else { + brackets = next + } + } + + if next.Text() == "{" { + cursor.Pop() + if !braces.Nil() { + errs.Error(ErrMoreThanOne{ + First: args.Options, + Second: next, + what: "definition body", + }) + } else { + braces = next + } + } + + // This will slurp up a value *not* prefixed with an =, too, but that + // case will be diagnosed. + isEq := next.Text() == "=" + if isEq || (canStartExpr(next) && braces.Nil() && args.Options.Nil()) { + if isEq { + args.Equals = cursor.Pop() + } + + next := parseExpr(errs, cursor) + if next == nil || next.Context() == nil { + continue // parseExpr generates diagnostics for this case. 
+ } + + if args.Value != nil { + what := "field tag" + if kw.Text() == "option" { + what = "option value" + } else if ty == nil { + what = "enum value" + } + + errs.Error(ErrMoreThanOne{ + First: args.Value, + Second: next, + what: what, + }) + } else if args.Equals.Nil() { + errs.Error(errUnexpected{ + node: next, + where: "without leading `=`", + got: "expression", + }) + } + + continue + } + + break + } + + if braces.Nil() { + args.Semicolon = parsePunct(errs, cursor, punctArgs{ + want: ";", + where: "after declaration", + }) + } + + parseTypes := func(parens Token, types TypeList) { + delimited := commaDelimited(true, errs, parens.Children(), func(cursor *Cursor) (Type, bool) { + ty, path := parseType(errs, cursor) + if !path.Nil() { + errs.Error(errUnexpected{ + node: path, + where: "in method parameter list", + got: "path", + }) + } + return ty, ty != nil + }) + + delimited(func(ty Type, comma Token) bool { + types.AppendComma(ty, comma) + return true + }) + } + + def := cursor.Context().NewDeclDef(args) + if !inputs.Nil() { + parseTypes(inputs, def.WithSignature().Inputs()) + } + if !outputs.Nil() { + parseTypes(outputs, def.WithSignature().Outputs()) + } + if !brackets.Nil() { + var options CompactOptions + if next := cursor.Peek(); next.Text() == "[" { + options = cursor.Context().NewOptions(cursor.Pop()) + parseOptions(errs, options.Brackets(), options) + } + def.SetOptions(options) + } + if !braces.Nil() { + where := where + switch kw.Text() { + case "message", "enum", "service", "extend", "group", "oneof", "rpc": + where = kw.Text() + } + + def.SetBody(parseBody(errs, braces, braces.Children(), where)) + } + + return def +} + +// parseBody parses an (optionally-{}-delimited) body of declarations. +func parseBody(errs *report.Report, token Token, contents *Cursor, where string) DeclScope { + body := contents.Context().NewDeclBody(token) + + // Drain the contents of the body into it. Remember, + // parseDecl must always make progress if there is more to + // parse. + for !contents.Done() { + if next := parseDecl(errs, contents, where); next != nil { + body.Append(next) + } + } + + return body +} + +// parseOptions parses a compact options list out of a [] token. +func parseOptions(errs *report.Report, brackets Token, options CompactOptions) CompactOptions { + cursor := brackets.Children() + delimited := commaDelimited(true, errs, cursor, func(cursor *Cursor) (Option, bool) { + path := parsePath(errs, cursor) + if path.Nil() { + return Option{}, false + } + + eq := cursor.Peek() + if eq.Text() == "=" { + cursor.Pop() + } else { + errs.Error(errUnexpected{ + node: eq, + want: []string{"`=`"}, + }) + eq = Token{} + } + + expr := parseExpr(errs, cursor) + if expr == nil { + return Option{}, false + } + + return Option{path, eq, expr}, true + }) + + delimited(func(opt Option, comma Token) bool { + options.AppendComma(opt, comma) + return true + }) + + return options +} + +// canStartExpr returns whether or not tok can start an expression. +func canStartExpr(tok Token) bool { + return canStartPath(tok) || tok.Kind() == TokenNumber || tok.Kind() == TokenString || + tok.Text() == "-" || tok.Text() == "{" || tok.Text() == "[" +} + +// parseExpr attempts to parse a full expression. +// +// May return nil if parsing completely fails. +func parseExpr(errs *report.Report, cursor *Cursor) Expr { + return parseOperator(errs, cursor, nil, 0) +} + +// parseOperator parses an operator expression (i.e., an expression that consists of more than +// one sub-expression). 
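+// For example, `1 to 10` becomes a range expression and `foo: 5` (or the
+// colon-less `foo { ... }`) becomes a key-value expression; both are assembled
+// here rather than in parseAtomicExpr.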
+// +// prec is the precedence; higher values mean tighter binding. This function calls itself +// with higher (or equal) precedence values. +func parseOperator(errs *report.Report, cursor *Cursor, expr Expr, prec int) Expr { + if expr == nil { + if prec >= 2 { + expr = parseAtomicExpr(errs, cursor) + } else { + expr = parseOperator(errs, cursor, expr, prec+1) + } + if expr == nil { + return nil + } + } + + lookahead := cursor.Peek() + switch prec { + case 0: + switch lookahead.Text() { + case ":", "=": // Allow equals signs, which are usually a mistake. + expr = cursor.Context().NewExprKV(ExprKVArgs{ + Key: expr, + Colon: cursor.Pop(), + Value: parseOperator(errs, cursor, nil, prec+1), + }) + case "{": // This is for colon-less, dict-values fields. + // The previous expression cannot also be a key-value pair, since + // this messes with parsing of dicts, which are not comma-separated. + if _, isKV := expr.(ExprKV); !isKV { + expr = cursor.Context().NewExprKV(ExprKVArgs{ + Key: expr, + Value: parseOperator(errs, cursor, nil, prec+1), + }) + } + } + case 1: + switch lookahead.Text() { + case "to": + expr = cursor.Context().NewExprRange(ExprRangeArgs{ + Start: expr, + To: cursor.Pop(), + End: parseOperator(errs, cursor, nil, prec), + }) + } + } + + return expr +} + +// ParseExpr attempts to parse an "atomic" expression, which is an expression that does not +// contain any infix operators. +// +// May return nil if parsing completely fails. +func parseAtomicExpr(errs *report.Report, cursor *Cursor) Expr { + exprPrefix := []string{ + "identifier", "number", "string", "`.`", "`-`", "`(...)`", "`[...]`", "`{...}`", + } + + next := cursor.Peek() + if next.Nil() { + errs.Error(errEOF(cursor, "in expression", exprPrefix)) + return nil + } + + switch { + case next.Kind() == TokenString, next.Kind() == TokenNumber: + return ExprLiteral{Token: cursor.Pop()} + + case canStartPath(next): + return ExprPath{Path: parsePath(errs, cursor)} + + case next.Text() == "[": + brackets := cursor.Pop() + delimited := commaDelimited(true, errs, brackets.Children(), func(cursor *Cursor) (Expr, bool) { + expr := parseExpr(errs, cursor) + return expr, expr != nil + }) + + array := cursor.Context().NewExprArray(brackets) + delimited(func(expr Expr, comma Token) bool { + array.AppendComma(expr, comma) + return true + }) + return array + + case next.Text() == "{": + cursor.Pop() + delimited := commaDelimited(false, errs, next.Children(), func(cursor *Cursor) (Expr, bool) { + expr := parseExpr(errs, cursor) + return expr, expr != nil + }) + + dict := cursor.Context().NewExprDict(next) + delimited(func(expr Expr, comma Token) bool { + field, ok := expr.(ExprKV) + if !ok { + errs.Error(errUnexpected{ + node: expr, + got: "expression", + want: []string{"key-value pair"}, + }) + + field = cursor.Context().NewExprKV(ExprKVArgs{Value: expr}) + } + dict.AppendComma(field, comma) + return true + }) + return dict + + case next.Text() == "-": + // NOTE: Protobuf does not (currently) have any suffix expressions, like a function + // call, but if those are added, this will need to be hoisted into a parsePrefixExpr + // function that calls parseAtomicExpr. + cursor.Pop() + inner := parseAtomicExpr(errs, cursor) + return cursor.Context().NewExprPrefixed(ExprPrefixedArgs{ + Prefix: next, + Expr: inner, + }) + + default: + // Consume the token and diagnose it. 
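+		// (That is, any token that cannot begin an expression, such as a stray `=`
+		// or `,`.)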
+ cursor.Pop() + errs.Error(errUnexpected{ + node: next, + want: exprPrefix, + }) + } + + return nil +} + +// parseType attempts to parse a type, optionally followed by a non-absolute path. +// +// This function is called in many situations that seem a bit weird to be parsing a type +// in, such as at the top level. This is because of an essential ambiguity in Protobuf's +// grammar: message Foo can start either a field (message Foo;) or a message (message Foo {}). +// Thus, we always parts a type-and-path, and based on what comes next, reinterpret the type +// as potentially being a keyword. +// +// This function assumes that we have decided to definitely parse a type, and +// will emit diagnostics to that effect. As such, the current token position on cursor +// should not be nil. +// +// May return nil if parsing completely fails. +func parseType(errs *report.Report, cursor *Cursor) (Type, Path) { + // First, parse a path, possibly preceded by a sequence of modifiers. + var ( + mods []Token + tyPath Path + ) + for !cursor.Done() && tyPath.Nil() { + next := cursor.Peek() + if !canStartPath(next) { + break + } + + tyPath = parsePath(errs, cursor) + + // Determine if this path is a modifier followed by a path. + if tyPath.Absolute() { + // Absolute paths cannot start with a modifier, so we are done. + break + } + + // Peel off the first two path components. + var components []PathComponent + tyPath.Components(func(component PathComponent) bool { + components = append(components, component) + return len(components) < 2 + }) + + ident := components[0].AsIdent() + if ident.Nil() { + // If this starts with an extension, we're also done. + break + } + + // There is one slightly squirrelly case: if the user wrote package .foo.bar; + // it will get picked up as the single path package.foo.bar. In this case, we would + // like to break apart the type so that it can diagnose as a package. + probablePackage := len(mods) == 0 && ident.Text() == "package" && !canStartPath(cursor.Peek()) + splitFirst := probablePackage + + // Check if ident is a modifier, and if so, peel it off. + if mod := TypePrefixByName(ident.Name()); mod != TypePrefixUnknown { + mods = append(mods, ident) + splitFirst = true + } + + if splitFirst { + // Drop the first component from the path. + if len(components) == 1 { + tyPath = Path{} + } else if !components[1].Separator().Nil() { + tyPath.raw[0] = components[1].Separator().raw + } else { + tyPath.raw[0] = components[1].Name().raw + } + } + + if probablePackage { + return rawType{ident.raw, ident.raw}.With(cursor), tyPath + } + } + + if tyPath.Nil() { + if len(mods) == 0 { + return nil, Path{} + } + + // Pop the last mod and make that into the path. This makes `optional optional` work + // as a type. + last := mods[len(mods)-1] + tyPath = rawPath{last.raw, last.raw}.With(cursor) + mods = mods[:len(mods)-1] + } + + ty := Type(TypePath{tyPath}) + + // Next, look for some angle brackets. We need to do this before draining `mods`, because + // angle brackets bind more tightly than modifiers. + next := cursor.Peek() + if next.Text() == "<" { + cursor.Pop() // Consume the angle brackets. 
+ generic := cursor.Context().NewTypeGeneric(TypeGenericArgs{ + Path: tyPath, + AngleBrackets: next, + }) + + delimited := commaDelimited(true, errs, next.Children(), func(cursor *Cursor) (Type, bool) { + if next := cursor.Peek(); !canStartPath(next) { + errs.Error(errUnexpected{ + node: next, + want: []string{"identifier", ".", "(...)"}, + }) + return nil, false + } + + ty, path := parseType(errs, cursor) + if !path.Nil() { + errs.Error(errUnexpected{ + node: path, + where: "in type argument list", + got: "field name", + }) + } + return ty, ty != nil + }) + + delimited(func(ty Type, comma Token) bool { + generic.Args().AppendComma(ty, comma) + return true + }) + } + + // Now, check for a path that follows all this. If there isn't a path, and + // ty is TypePath, and there is still at least one modifier, we interpret the + // last modifier as the type and the current path type as the path after the type. + var path Path + next = cursor.Peek() + if canStartPath(next) { + path = parsePath(errs, cursor) + } else if _, ok := ty.(TypePath); ok && len(mods) > 0 { + path = tyPath + + // Pop the last mod and make that into the type. This makes `optional optional = 1` work + // as a field. + last := mods[len(mods)-1] + tyPath = rawPath{last.raw, last.raw}.With(cursor) + mods = mods[:len(mods)-1] + ty = TypePath{tyPath} + } + + // Finally, apply any remaining modifiers, in reverse order, to ty. + for i := len(mods) - 1; i >= 0; i-- { + ty = cursor.Context().NewTypePrefixed(TypePrefixedArgs{ + Prefix: mods[i], + Type: ty, + }) + } + + return ty, path +} + +// commaDelimited returns an iterator over a comma-delimited list of things out of cursor. +// This automatically handles various corner-cases around commas that occur throughout the +// grammar. +// +// This will completely drain cursor, unless the parse function returns false, which signals +// that the end of the list has been reached. +func commaDelimited[T any]( + commasRequired bool, + _ *report.Report, + cursor *Cursor, + parse func(*Cursor) (T, bool), +) func(func(T, Token) bool) { + return func(yield func(T, Token) bool) { + for !cursor.Done() { + result, ok := parse(cursor) + + // Check for a trailing comma. + var comma Token + if next := cursor.Peek(); next.Text() == "," { + cursor.Pop() + comma = next + } + + if !ok || !yield(result, comma) { + break + } + + if commasRequired && comma.Nil() { + break + } + } + } +} + +// canStartPath returns whether or not tok can start a path. +func canStartPath(tok Token) bool { + return tok.Kind() == TokenIdent || tok.Text() == "." || tok.Text() == "/" || tok.Text() == "(" +} + +// parsePath parses the longest path at cursor. Returns a nil path if +// the next token is neither an identifier, a dot, or a (). +// +// If an invalid token occurs after a dot, returns the longest path up until that dot. +// The cursor is then placed after the dot. +// +// This function assumes that we have decided to definitely parse a path, and +// will emit diagnostics to that effect. As such, the current token position on cursor +// should not be nil. +func parsePath(errs *report.Report, cursor *Cursor) Path { + start := cursor.Peek() + if !canStartPath(start) { + errs.Error(errUnexpected{ + node: start, + want: []string{"identifier", "`.`", "`(...)`"}, + }) + return Path{} + } + + // Whether the next unskippable token should be a separator. + var prevSeparator Token + if start.Text() == "." 
|| start.Text() == "/" { + prevSeparator = cursor.Pop() + } + end := start +pathLoop: + for !cursor.Done() { + next := cursor.Peek() + first := start == next + switch { + case next.Text() == "." || next.Text() == "/": + if !prevSeparator.Nil() { + // This is a double dot, so something like foo..bar, ..foo, or foo.. + // We diagnose it and move on -- Path.Components is robust against + // this kind of pattern. + errs.Error(errUnexpected{ + node: next, + where: "after `" + prevSeparator.Text() + "`", + want: []string{"identifier", "`(...)`"}, + }) + } + prevSeparator = cursor.Pop() + + case next.Kind() == TokenIdent: + if !first && prevSeparator.Nil() { + // This means we found something like `foo bar`, which means we + // should stop consuming components. + break pathLoop + } + + end = next + prevSeparator = Token{} + cursor.Pop() + + case next.Text() == "(": + if !first && prevSeparator.Nil() { + // This means we found something like `foo(bar)`, which means we + // should stop consuming components. + break pathLoop + } + + // Recurse into this token and check it, too, contains a path. We throw + // the result away once we're done, because we don't need to store it; + // a Path simply stores its start and end tokens and knows how to + // recurse into extensions. We also need to check there are no + // extraneous tokens. + contents := next.Children() + parsePath(errs, contents) + if tok := contents.Peek(); !tok.Nil() { + errs.Error(errUnexpected{ + node: start, + where: "in extension path", + }) + } + + end = next + prevSeparator = Token{} + cursor.Pop() + + default: + if prevSeparator.Nil() { + // This means we found something like `foo =`, which means we + // should stop consuming components. + break pathLoop + } + + // This means we found something like foo.1 or bar."xyz" or bar.[...]. + // TODO: Do smarter recovery here. Generally speaking it's likely we should *not* + // consume this token. + errs.Error(errUnexpected{ + node: next, + want: []string{"identifier", "`(...)`"}, + }) + + end = prevSeparator // Include the trailing separator. + break pathLoop + } + } + + // NOTE: We do not need to legalize against a single-dot path; that + // is already done for us by the if nextDot checks. + + return rawPath{start.raw, end.raw}.With(cursor) +} + +type punctArgs struct { + want string + diagnoseUnless bool + where string +} + +// parsePunct attempts to unconditionally parse some punctuation. +// +// If the wrong token is encountered, it DOES NOT consume the token, returning a nil +// token instead. If diagnose is true, this will diagnose the problem. +func parsePunct(errs *report.Report, cursor *Cursor, args punctArgs) Token { + next := cursor.Peek() + if next.Text() == args.want { + return cursor.Pop() + } + if !args.diagnoseUnless { + if next.Nil() { + errs.Error(errEOF(cursor, args.where, []string{"`" + args.want + "`"})) + } else { + errs.Error(errUnexpected{ + node: next, + where: args.where, + want: []string{"`" + args.want + "`"}, + }) + } + } + return Token{} +} diff --git a/experimental/ast/parser_test.go b/experimental/ast/parser_test.go new file mode 100644 index 00000000..d2b76d2b --- /dev/null +++ b/experimental/ast/parser_test.go @@ -0,0 +1,97 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast_test + +import ( + "fmt" + "regexp" + "strings" + "testing" + + "github.com/bufbuild/protocompile/experimental/ast" + "github.com/bufbuild/protocompile/experimental/report" + "github.com/bufbuild/protocompile/internal/golden" + "github.com/tidwall/pretty" + "google.golang.org/protobuf/encoding/protojson" +) + +const jsonSpanSub = `{ "start": $1, "end": $2 }` + +var jsonSpanPat = regexp.MustCompile(`\{\s*"start":\s*(\d+),\s*"end":\s*(\d+)\s*\}`) + +func TestParser(t *testing.T) { + renderer := report.Renderer{ + ShowRemarks: true, + } + + corpus := golden.Corpus{ + Root: "testdata/parser", + Refresh: "PROTOCOMPILE_REFRESH", + Extension: "proto", + Outputs: []golden.Output{ + {Extension: "lex.tsv"}, + {Extension: "ast.json"}, + {Extension: "stderr"}, + }, + } + corpus.Run(t, func(t *testing.T, path, text string, results []string) { + var r report.Report + defer func() { + // Dump out the report in a defer so it shows up even if we panic. + r.Sort() + text, _ := renderer.Render(&r) + results[2] = text + }() + + file := ast.Parse(report.File{Path: path, Text: text}, &r) + proto := ast.FileToProto(file) + + var tokens strings.Builder + file.Context().Tokens().Iter(func(i int, tok ast.Token) bool { + start, end := tok.Span().Offsets() + loc := tok.Span().Start() + fmt.Fprintf(&tokens, "%4d:%#04x\t%v\t%d:%d\t%d:%d:%d", i, i, tok.Kind(), start, end, loc.Line, loc.Column, loc.UTF16) + if v, ok := tok.AsInt(); ok { + fmt.Fprintf(&tokens, "\t%d", v) + } else if v, ok := tok.AsFloat(); ok { + fmt.Fprintf(&tokens, "\t%f", v) + } else if v, ok := tok.AsString(); ok { + fmt.Fprintf(&tokens, "\t%q", v) + } + fmt.Fprintf(&tokens, "\t%q\n", tok.Text()) + + return true + }) + results[0] = tokens.String() + + jsonOptions := protojson.MarshalOptions{ + Multiline: true, + Indent: " ", + } + + json, err := jsonOptions.Marshal(proto) + if err != nil { + results[1] = fmt.Sprint("marshal error:", err) + } else { + json = pretty.PrettyOptions(json, &pretty.Options{ + Indent: " ", + }) + results[1] = string(json) + } + + // Compact all of the Span objects into single lines. + results[1] = jsonSpanPat.ReplaceAllString(results[1], jsonSpanSub) + }) +} diff --git a/experimental/ast/path.go b/experimental/ast/path.go new file mode 100644 index 00000000..54119c72 --- /dev/null +++ b/experimental/ast/path.go @@ -0,0 +1,210 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +// Path represents a multi-part identifier. 
+//
+// This includes single identifiers like foo, references like foo.bar,
+// and fully-qualified names like .foo.bar.
+type Path struct {
+	withContext
+
+	raw rawPath
+}
+
+// Absolute returns whether this path starts with a dot.
+func (p Path) Absolute() bool {
+	var abs bool
+	p.Components(func(c PathComponent) bool {
+		abs = !c.Separator().Nil()
+		return false
+	})
+	return abs
+}
+
+// AsIdent returns the single identifier that comprises this path, or
+// the nil token.
+func (p Path) AsIdent() Token {
+	var tok Token
+	var count int
+	p.Components(func(c PathComponent) bool {
+		if count > 0 {
+			tok = Token{}
+			return false
+		}
+
+		if c.Separator().Nil() {
+			tok = c.AsIdent()
+		}
+
+		count++
+		return true
+	})
+	return tok
+}
+
+// AsBuiltin returns the builtin that this path represents.
+//
+// If this path does not represent a builtin, returns [BuiltinUnknown].
+func (p Path) AsBuiltin() Builtin {
+	return BuiltinByName(p.AsIdent().Text())
+}
+
+// Span implements [Spanner] for Path.
+func (p Path) Span() Span {
+	return JoinSpans(p.raw[0].With(p), p.raw[1].With(p))
+}
+
+// Components is an [iter.Seq] that ranges over each component in this path. Specifically,
+// it yields the (nilable) separator that precedes the component, and its name token.
+func (p Path) Components(yield func(PathComponent) bool) {
+	if p.Nil() {
+		return
+	}
+
+	first := p.raw[0].With(p)
+	if synth := first.synthetic(); synth != nil {
+		panic("synthetic paths are not implemented yet")
+	}
+
+	cursor := Cursor{
+		withContext: p.withContext,
+		start:       p.raw[0],
+		end:         p.raw[1] + 1, // Remember, Cursor.end is exclusive!
+	}
+
+	var sep Token
+	var broken bool
+	cursor.Iter(func(tok Token) bool {
+		if tok.Text() == "." || tok.Text() == "/" {
+			if !sep.Nil() {
+				// Uh-oh, empty path component!
+				if !yield(PathComponent{sep, Token{}}) {
+					broken = true
+					return false
+				}
+			}
+			sep = tok
+			return true
+		}
+
+		if !yield(PathComponent{sep, tok}) {
+			broken = true
+			return false
+		}
+		sep = Token{}
+		return true
+	})
+	if !broken && !sep.Nil() {
+		yield(PathComponent{sep, Token{}})
+	}
+}
+
+// PathComponent is a piece of a path. This is either an identifier or a nested path
+// (for an extension name).
+type PathComponent struct {
+	separator, name Token
+}
+
+// Separator is the token that separates this component from the previous one, if
+// any. This may be a dot or a slash.
+func (p PathComponent) Separator() Token {
+	return p.separator
+}
+
+// Name is the token that represents this component's name. This is either an
+// identifier or a (...) token containing a path.
+func (p PathComponent) Name() Token {
+	return p.name
+}
+
+// IsEmpty returns whether this is an empty path component. Such components are not allowed
+// in the grammar but may occur in invalid inputs nonetheless.
+func (p PathComponent) IsEmpty() bool {
+	return p.Name().Nil()
+}
+
+// IsExtension returns whether this path component is an extension component, i.e.
+// (a.b.c).
+func (p PathComponent) IsExtension() bool {
+	return p.Name().Kind() == TokenPunct
+}
+
+// AsExtension returns the Path inside of this path component, if it is an extension
+// path component.
+func (p PathComponent) AsExtension() Path {
+	if !p.IsExtension() {
+		return Path{}
+	}
+
+	// If this is a synthetic token, its children are already precisely a path,
+	// so we can use the "synthetic with children" form of Path.
+	if synth := p.Name().synthetic(); synth != nil {
+		return Path{withContext{p.Name().Context()}, rawPath{p.Name().raw, 0}}
+	}
+
+	// Find the first and last non-skippable tokens to be the bounds.
+	var first, last Token
+	p.Name().Children().Iter(func(token Token) bool {
+		if token.Kind().IsSkippable() {
+			return true
+		}
+
+		if first.Nil() {
+			first = token
+		} else {
+			// Only set last after seeing first, because then if we only
+			// ever see one non-skippable token, it will leave last nil.
+			last = token
+		}
+		return true
+	})
+
+	return Path{withContext{p.Name().Context()}, rawPath{first.raw, last.raw}}
+}
+
+// AsIdent returns the single identifier that makes up this path component, if
+// it is not an extension path component.
+func (p PathComponent) AsIdent() Token {
+	if p.IsExtension() {
+		return Token{}
+	}
+	return p.name
+}
+
+// rawPath is the raw contents of a Path without its Context.
+//
+// This has one of the following configurations.
+//
+// 1. Two zero tokens. This is the nil path.
+//
+// 2. Two non-synthetic tokens. This means the path is all tokens between them including
+// the end-points.
+//
+// 3. A single synthetic token and a nil token. If this token has children, those are
+// the path components. Otherwise, the token itself is the sole token.
+//
+// NOTE: Multiple compressed representations in this package depend on the fact that
+// if raw[0] < 0, then raw[1] == 0 for all valid paths.
+type rawPath [2]rawToken
+
+// With wraps this rawPath with a context to present to the user.
+func (p rawPath) With(c Contextual) Path {
+	if p[0] == 0 {
+		return Path{}
+	}
+
+	return Path{withContext{c.Context()}, p}
+}
diff --git a/experimental/ast/slice.go b/experimental/ast/slice.go
new file mode 100644
index 00000000..e10a157c
--- /dev/null
+++ b/experimental/ast/slice.go
@@ -0,0 +1,94 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ast
+
+// Slice is a type that offers the same interface as an ordinary Go
+// slice.
+//
+// This is used to provide a consistent interface to various AST nodes that
+// contain a variable number of "something", but the actual backing array
+// is some compressed representation.
+type Slice[T any] interface {
+	// Len returns this slice's length.
+	Len() int
+
+	// At returns the nth value of this slice.
+	//
+	// Panics if n >= Len().
+	At(n int) T
+
+	// Iter is an iterator over the slice.
+	Iter(yield func(int, T) bool)
+}
+
+// Inserter is a [Slice] that allows insertion and removal of elements at specific
+// indices.
+//
+// Insertion/removal behavior while calling Iter() is unspecified.
+type Inserter[T any] interface {
+	Slice[T]
+
+	// Append appends a value to this sequence.
+	Append(value T)
+
+	// Insert inserts a value at index n, shifting things around as needed.
+	//
+	// Panics if n > Len(). Insert(Len(), x) will append.
+	Insert(n int, value T)
+
+	// Delete deletes the value at index n.
+	//
+	// Panics if n >= Len().
+	Delete(n int)
+}
+
+// Commas is like [Slice], but it's for a comma-delimited list of some kind.
+//
+// This makes it easy to work with the list as though it's a slice, while also
+// allowing access to the commas.
+type Commas[T any] interface {
+	Inserter[T]
+
+	// Comma is like [Slice.At] but returns the comma that follows the nth
+	// element.
+	//
+	// May be nil, either because it's the last element (a common situation
+	// where there is no comma) or it was added with Insert() rather than
+	// InsertComma().
+	Comma(n int) Token
+
+	// AppendComma is like Append, but includes an explicit comma.
+	AppendComma(value T, comma Token)
+
+	// InsertComma is like Insert, but includes an explicit comma.
+	InsertComma(n int, value T, comma Token)
+}
+
+// funcSlice implements [Slice] using an ordinary Go slice and a function to transform
+// its elements.
+type funcSlice[T, U any] struct {
+	s []T
+	f func(int, *T) U
+}
+
+func (s funcSlice[T, U]) Len() int   { return len(s.s) }
+func (s funcSlice[T, U]) At(n int) U { return s.f(n, &s.s[n]) }
+func (s funcSlice[T, U]) Iter(yield func(int, U) bool) {
+	for i := range s.s {
+		if !yield(i, s.f(i, &s.s[i])) {
+			break
+		}
+	}
+}
diff --git a/experimental/ast/span.go b/experimental/ast/span.go
new file mode 100644
index 00000000..6a24185b
--- /dev/null
+++ b/experimental/ast/span.go
@@ -0,0 +1,105 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package ast
+
+import (
+	"math"
+
+	"github.com/bufbuild/protocompile/experimental/report"
+)
+
+// Spanner is any type that has a span, given as a range of tokens.
+type Spanner interface {
+	Contextual
+
+	// Span returns this value's span, if known.
+	//
+	// Some nodes may not have spans, such as those produced synthetically
+	// during rewrites. In this case, the returned span will be the zero
+	// value.
+	Span() Span
+}
+
+// Span is a source code span, i.e., a range of tokens.
+//
+// Spans are used primarily for error reporting.
+type Span struct {
+	withContext
+
+	start, end int
+}
+
+var _ report.Span = Span{}
+
+// Span implements [Spanner] for Span.
+func (s Span) Span() Span {
+	return s
+}
+
+// Offsets returns the byte offsets for this span.
+func (s Span) Offsets() (start, end int) {
+	return s.start, s.end
+}
+
+// Text returns the text corresponding to this span.
+func (s Span) Text() string {
+	return s.File().Text[s.start:s.end]
+}
+
+// File returns the file this span is for.
+func (s Span) File() report.File {
+	return s.Context().file.File()
+}
+
+// Start returns the start location for this span.
+func (s Span) Start() report.Location {
+	return s.Context().file.Search(s.start)
+}
+
+// End returns the end location for this span.
+func (s Span) End() report.Location {
+	return s.Context().file.Search(s.end)
+}
+
+// JoinSpans joins a collection of spans, returning the smallest span that
+// contains all of them.
+//
+// Nil spans among spans are ignored. If every span in spans is nil, returns
+// the nil span.
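+//
+// For example, JoinSpans(def.Keyword(), def.Semicolon()) would produce a span
+// covering a whole declaration, from its keyword through its trailing `;`
+// (an illustrative call; the exact accessor names may differ).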
+// +// If there are at least two distinct non-nil contexts among the spans, +// this function panics. +func JoinSpans(spans ...Spanner) Span { + joined := Span{start: math.MaxInt} + for _, span := range spans { + if span == nil || span.Context() == nil { + continue + } + span := span.Span() + if joined.ctx == nil { + joined.ctx = span.Context() + } else if joined.ctx != span.Context() { + panic("protocompile/ast: passed spans with incompatible contexts to JoinSpans()") + } + + joined.start = min(joined.start, span.start) + joined.end = max(joined.end, span.end) + } + + if joined.ctx == nil { + return Span{} + } + return joined +} diff --git a/experimental/ast/testdata/parser/import/42.proto b/experimental/ast/testdata/parser/import/42.proto new file mode 100644 index 00000000..fe8c6198 --- /dev/null +++ b/experimental/ast/testdata/parser/import/42.proto @@ -0,0 +1,19 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +import 42; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/import/42.proto.ast.json b/experimental/ast/testdata/parser/import/42.proto.ast.json new file mode 100644 index 00000000..fbe2f972 --- /dev/null +++ b/experimental/ast/testdata/parser/import/42.proto.ast.json @@ -0,0 +1,53 @@ +{ + "file": { + "path": "testdata/parser/import/42.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgppbXBvcnQgNDI7" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "import": { + "importPath": { + "literal": { + "intValue": "42", + "span": { "start": 
648, "end": 650 } + } + }, + "span": { "start": 641, "end": 651 }, + "keywordSpan": { "start": 641, "end": 647 }, + "importPathSpan": { "start": 648, "end": 650 }, + "semicolonSpan": { "start": 650, "end": 651 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/import/42.proto.lex.tsv b/experimental/ast/testdata/parser/import/42.proto.lex.tsv new file mode 100644 index 00000000..0240c671 --- /dev/null +++ b/experimental/ast/testdata/parser/import/42.proto.lex.tsv @@ -0,0 +1,30 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:647 19:1:0 "import" + 27:0x001b TokenSpace 647:648 19:7:6 " " + 28:0x001c TokenNumber 648:650 19:8:7 42 "42" + 29:0x001d TokenPunct 650:651 19:10:9 ";" diff --git a/experimental/ast/testdata/parser/import/42.proto.stderr b/experimental/ast/testdata/parser/import/42.proto.stderr new file mode 100644 index 00000000..770fbf6d --- /dev/null +++ b/experimental/ast/testdata/parser/import/42.proto.stderr @@ -0,0 +1,7 @@ +error: unexpected integer literal in import + --> testdata/parser/import/42.proto:19:8 + | +19 | import 42; + | ^^ expected string literal + +encountered 1 error diff --git a/experimental/ast/testdata/parser/import/eof_after_kw.proto b/experimental/ast/testdata/parser/import/eof_after_kw.proto new file mode 100644 index 00000000..1720382f --- /dev/null +++ b/experimental/ast/testdata/parser/import/eof_after_kw.proto @@ -0,0 +1,20 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +import "foo.proto"; +import \ No newline at end of file diff --git a/experimental/ast/testdata/parser/import/eof_after_kw.proto.ast.json b/experimental/ast/testdata/parser/import/eof_after_kw.proto.ast.json new file mode 100644 index 00000000..f03fbe71 --- /dev/null +++ b/experimental/ast/testdata/parser/import/eof_after_kw.proto.ast.json @@ -0,0 +1,59 @@ +{ + "file": { + "path": "testdata/parser/import/eof_after_kw.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgppbXBvcnQgImZvby5wcm90byI7CmltcG9ydA==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "import": { + "importPath": { + "literal": { + "stringValue": "foo.proto", + "span": { "start": 648, "end": 659 } + } + }, + "span": { "start": 641, "end": 660 }, + "keywordSpan": { "start": 641, "end": 647 }, + "importPathSpan": { "start": 648, "end": 659 }, + "semicolonSpan": { "start": 659, "end": 660 } + } + }, + { + "import": { + "span": { "start": 661, "end": 667 }, + "keywordSpan": { "start": 661, "end": 667 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/import/eof_after_kw.proto.lex.tsv b/experimental/ast/testdata/parser/import/eof_after_kw.proto.lex.tsv new file mode 100644 index 00000000..78f9d01b --- /dev/null +++ b/experimental/ast/testdata/parser/import/eof_after_kw.proto.lex.tsv @@ -0,0 +1,32 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the 
License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:647 19:1:0 "import" + 27:0x001b TokenSpace 647:648 19:7:6 " " + 28:0x001c TokenString 648:659 19:8:7 "foo.proto" "\"foo.proto\"" + 29:0x001d TokenPunct 659:660 19:19:18 ";" + 30:0x001e TokenSpace 660:661 19:20:19 "\n" + 31:0x001f TokenIdent 661:667 20:1:0 "import" diff --git a/experimental/ast/testdata/parser/import/eof_after_kw.proto.stderr b/experimental/ast/testdata/parser/import/eof_after_kw.proto.stderr new file mode 100644 index 00000000..21c45159 --- /dev/null +++ b/experimental/ast/testdata/parser/import/eof_after_kw.proto.stderr @@ -0,0 +1,13 @@ +error: unexpected string literal in import + --> testdata/parser/import/eof_after_kw.proto:19:8 + | +19 | import "foo.proto"; + | ^^^^^^^^^^^ expected string literal + +error: import is missing a file path + --> testdata/parser/import/eof_after_kw.proto:20:1 + | +20 | import + | ^^^^^^ help: insert the name of the file to import after this keyword + +encountered 2 errors diff --git a/experimental/ast/testdata/parser/import/escapes.proto b/experimental/ast/testdata/parser/import/escapes.proto new file mode 100644 index 00000000..f95c53eb --- /dev/null +++ b/experimental/ast/testdata/parser/import/escapes.proto @@ -0,0 +1,20 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package test; + +import "foo\x2eproto"; +import "bar" ".proto"; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/import/escapes.proto.ast.json b/experimental/ast/testdata/parser/import/escapes.proto.ast.json new file mode 100644 index 00000000..5e4ff0e7 --- /dev/null +++ b/experimental/ast/testdata/parser/import/escapes.proto.ast.json @@ -0,0 +1,67 @@ +{ + "file": { + "path": "testdata/parser/import/escapes.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgppbXBvcnQgImZvb1x4MmVwcm90byI7CmltcG9ydCAiYmFyIiAiLnByb3RvIjs=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "import": { + "importPath": { + "literal": { + "stringValue": "foo.proto", + "span": { "start": 648, "end": 662 } + } + }, + "span": { "start": 641, "end": 663 }, + "keywordSpan": { "start": 641, "end": 647 }, + "importPathSpan": { "start": 648, "end": 662 }, + "semicolonSpan": { "start": 662, "end": 663 } + } + }, + { + "import": { + "importPath": { + "literal": { + "stringValue": "bar.proto", + "span": { "start": 671, "end": 685 } + } + }, + "span": { "start": 664, "end": 686 }, + "keywordSpan": { "start": 664, "end": 670 }, + "importPathSpan": { "start": 671, "end": 685 }, + "semicolonSpan": { "start": 685, "end": 686 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/import/escapes.proto.lex.tsv b/experimental/ast/testdata/parser/import/escapes.proto.lex.tsv new file mode 100644 index 00000000..0f077df0 --- /dev/null +++ b/experimental/ast/testdata/parser/import/escapes.proto.lex.tsv @@ -0,0 +1,37 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// 
http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:647 19:1:0 "import" + 27:0x001b TokenSpace 647:648 19:7:6 " " + 28:0x001c TokenString 648:662 19:8:7 "foo.proto" "\"foo\\x2eproto\"" + 29:0x001d TokenPunct 662:663 19:22:21 ";" + 30:0x001e TokenSpace 663:664 19:23:22 "\n" + 31:0x001f TokenIdent 664:670 20:1:0 "import" + 32:0x0020 TokenSpace 670:671 20:7:6 " " + 33:0x0021 TokenString 671:685 20:8:7 "bar.proto" "\"bar\"" + 34:0x0022 TokenSpace 676:677 20:13:12 " " + 35:0x0023 TokenString 671:685 20:8:7 ".proto" "\".proto\"" + 36:0x0024 TokenPunct 685:686 20:22:21 ";" diff --git a/experimental/ast/testdata/parser/import/escapes.proto.stderr b/experimental/ast/testdata/parser/import/escapes.proto.stderr new file mode 100644 index 00000000..687d772a --- /dev/null +++ b/experimental/ast/testdata/parser/import/escapes.proto.stderr @@ -0,0 +1,25 @@ +warning: import path should be a single, escape-less string + --> testdata/parser/import/escapes.proto:19:8 + | +19 | import "foo\x2eproto"; + | ^^^^^^^^^^^^^^ help: change this to "foo.proto" + +error: unexpected string literal in import + --> testdata/parser/import/escapes.proto:19:8 + | +19 | import "foo\x2eproto"; + | ^^^^^^^^^^^^^^ expected string literal + +warning: import path should be a single, escape-less string + --> testdata/parser/import/escapes.proto:20:8 + | +20 | import "bar" ".proto"; + | ^^^^^^^^^^^^^^ help: change this to "bar.proto" + +error: unexpected string literal in import + --> testdata/parser/import/escapes.proto:20:8 + | +20 | import "bar" ".proto"; + | ^^^^^^^^^^^^^^ expected string literal + +encountered 2 errors and 2 warnings diff --git a/experimental/ast/testdata/parser/import/no_path.proto b/experimental/ast/testdata/parser/import/no_path.proto new file mode 100644 index 00000000..4bd9539c --- /dev/null +++ b/experimental/ast/testdata/parser/import/no_path.proto @@ -0,0 +1,21 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +import; +import weak; +import public; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/import/no_path.proto.ast.json b/experimental/ast/testdata/parser/import/no_path.proto.ast.json new file mode 100644 index 00000000..4dd535e6 --- /dev/null +++ b/experimental/ast/testdata/parser/import/no_path.proto.ast.json @@ -0,0 +1,64 @@ +{ + "file": { + "path": "testdata/parser/import/no_path.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgppbXBvcnQ7CmltcG9ydCB3ZWFrOwppbXBvcnQgcHVibGljOw==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "import": { + "span": { "start": 641, "end": 648 }, + "keywordSpan": { "start": 641, "end": 647 }, + "semicolonSpan": { "start": 647, "end": 648 } + } + }, + { + "import": { + "modifier": "MODIFIER_WEAK", + "span": { "start": 649, "end": 661 }, + "keywordSpan": { "start": 649, "end": 655 }, + "modifierSpan": { "start": 656, "end": 660 }, + "semicolonSpan": { "start": 660, "end": 661 } + } + }, + { + "import": { + "modifier": "MODIFIER_PUBLIC", + "span": { "start": 662, "end": 676 }, + "keywordSpan": { "start": 662, "end": 668 }, + "modifierSpan": { "start": 669, "end": 675 }, + "semicolonSpan": { "start": 675, "end": 676 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/import/no_path.proto.lex.tsv b/experimental/ast/testdata/parser/import/no_path.proto.lex.tsv new file mode 100644 index 00000000..844e44dc --- /dev/null +++ b/experimental/ast/testdata/parser/import/no_path.proto.lex.tsv @@ -0,0 +1,38 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 
TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:647 19:1:0 "import" + 27:0x001b TokenPunct 647:648 19:7:6 ";" + 28:0x001c TokenSpace 648:649 19:8:7 "\n" + 29:0x001d TokenIdent 649:655 20:1:0 "import" + 30:0x001e TokenSpace 655:656 20:7:6 " " + 31:0x001f TokenIdent 656:660 20:8:7 "weak" + 32:0x0020 TokenPunct 660:661 20:12:11 ";" + 33:0x0021 TokenSpace 661:662 20:13:12 "\n" + 34:0x0022 TokenIdent 662:668 21:1:0 "import" + 35:0x0023 TokenSpace 668:669 21:7:6 " " + 36:0x0024 TokenIdent 669:675 21:8:7 "public" + 37:0x0025 TokenPunct 675:676 21:14:13 ";" diff --git a/experimental/ast/testdata/parser/import/no_path.proto.stderr b/experimental/ast/testdata/parser/import/no_path.proto.stderr new file mode 100644 index 00000000..595e321e --- /dev/null +++ b/experimental/ast/testdata/parser/import/no_path.proto.stderr @@ -0,0 +1,25 @@ +error: import is missing a file path + --> testdata/parser/import/no_path.proto:19:1 + | +19 | import; + | ^^^^^^ help: insert the name of the file to import after this keyword + +error: import is missing a file path + --> testdata/parser/import/no_path.proto:20:8 + | +20 | import weak; + | ^^^^ help: insert the name of the file to import after this keyword + +warning: weak imports are discouraged and broken in some runtimes + --> testdata/parser/import/no_path.proto:20:8 + | +20 | import weak; + | ^^^^ + +error: import is missing a file path + --> testdata/parser/import/no_path.proto:21:8 + | +21 | import public; + | ^^^^^^ help: insert the name of the file to import after this keyword + +encountered 3 errors and 1 warning diff --git a/experimental/ast/testdata/parser/import/ok.proto b/experimental/ast/testdata/parser/import/ok.proto new file mode 100644 index 00000000..9ebaa986 --- /dev/null +++ b/experimental/ast/testdata/parser/import/ok.proto @@ -0,0 +1,21 @@ +// Copyright 2020-2024 Buf Technologies, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +import "foo.proto"; +import weak "weak.proto"; +import public "public.proto"; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/import/ok.proto.ast.json b/experimental/ast/testdata/parser/import/ok.proto.ast.json new file mode 100644 index 00000000..c2235d47 --- /dev/null +++ b/experimental/ast/testdata/parser/import/ok.proto.ast.json @@ -0,0 +1,85 @@ +{ + "file": { + "path": "testdata/parser/import/ok.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgppbXBvcnQgImZvby5wcm90byI7CmltcG9ydCB3ZWFrICJ3ZWFrLnByb3RvIjsKaW1wb3J0IHB1YmxpYyAicHVibGljLnByb3RvIjs=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "import": { + "importPath": { + "literal": { + "stringValue": "foo.proto", + "span": { "start": 648, "end": 659 } + } + }, + "span": { "start": 641, "end": 660 }, + "keywordSpan": { "start": 641, "end": 647 }, + "importPathSpan": { "start": 648, "end": 659 }, + "semicolonSpan": { "start": 659, "end": 660 } + } + }, + { + "import": { + "modifier": "MODIFIER_WEAK", + "importPath": { + "literal": { + "stringValue": "weak.proto", + "span": { "start": 673, "end": 685 } + } + }, + "span": { "start": 661, "end": 686 }, + "keywordSpan": { "start": 661, "end": 667 }, + "modifierSpan": { "start": 668, "end": 672 }, + "importPathSpan": { "start": 673, "end": 685 }, + "semicolonSpan": { "start": 685, "end": 686 } + } + }, + { + "import": { + "modifier": "MODIFIER_PUBLIC", + "importPath": { + "literal": { + "stringValue": "public.proto", + "span": { "start": 701, 
"end": 715 } + } + }, + "span": { "start": 687, "end": 716 }, + "keywordSpan": { "start": 687, "end": 693 }, + "modifierSpan": { "start": 694, "end": 700 }, + "importPathSpan": { "start": 701, "end": 715 }, + "semicolonSpan": { "start": 715, "end": 716 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/import/ok.proto.lex.tsv b/experimental/ast/testdata/parser/import/ok.proto.lex.tsv new file mode 100644 index 00000000..fdcee9a0 --- /dev/null +++ b/experimental/ast/testdata/parser/import/ok.proto.lex.tsv @@ -0,0 +1,44 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:647 19:1:0 "import" + 27:0x001b TokenSpace 647:648 19:7:6 " " + 28:0x001c TokenString 648:659 19:8:7 "foo.proto" "\"foo.proto\"" + 29:0x001d TokenPunct 659:660 19:19:18 ";" + 30:0x001e TokenSpace 660:661 19:20:19 "\n" + 31:0x001f TokenIdent 661:667 20:1:0 "import" + 32:0x0020 TokenSpace 667:668 20:7:6 " " + 33:0x0021 TokenIdent 668:672 20:8:7 "weak" + 34:0x0022 TokenSpace 672:673 20:12:11 " " + 35:0x0023 TokenString 673:685 20:13:12 "weak.proto" "\"weak.proto\"" + 36:0x0024 TokenPunct 685:686 20:25:24 ";" + 37:0x0025 TokenSpace 686:687 20:26:25 "\n" + 38:0x0026 TokenIdent 687:693 21:1:0 "import" + 39:0x0027 TokenSpace 693:694 21:7:6 " " + 40:0x0028 TokenIdent 694:700 21:8:7 "public" + 41:0x0029 TokenSpace 700:701 21:14:13 " " + 42:0x002a TokenString 701:715 21:15:14 "public.proto" "\"public.proto\"" + 43:0x002b TokenPunct 715:716 21:29:28 ";" diff --git a/experimental/ast/testdata/parser/import/ok.proto.stderr b/experimental/ast/testdata/parser/import/ok.proto.stderr new file mode 100644 index 00000000..a8b6f8cd --- /dev/null +++ b/experimental/ast/testdata/parser/import/ok.proto.stderr @@ -0,0 +1,25 @@ +error: unexpected string literal in import + --> testdata/parser/import/ok.proto:19:8 + | +19 | import "foo.proto"; + | 
^^^^^^^^^^^ expected string literal + +warning: weak imports are discouraged and broken in some runtimes + --> testdata/parser/import/ok.proto:20:8 + | +20 | import weak "weak.proto"; + | ^^^^ + +error: unexpected string literal in weak import + --> testdata/parser/import/ok.proto:20:13 + | +20 | import weak "weak.proto"; + | ^^^^^^^^^^^^ expected string literal + +error: unexpected string literal in public import + --> testdata/parser/import/ok.proto:21:15 + | +21 | import public "public.proto"; + | ^^^^^^^^^^^^^^ expected string literal + +encountered 3 errors and 1 warning diff --git a/experimental/ast/testdata/parser/import/options.proto b/experimental/ast/testdata/parser/import/options.proto new file mode 100644 index 00000000..dc5f60a5 --- /dev/null +++ b/experimental/ast/testdata/parser/import/options.proto @@ -0,0 +1,21 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +import "foo.proto" [(not.allowed) = "here"]; +import weak "weak.proto" [(not.allowed) = "here"]; +import public "public.proto" [(not.allowed) = "here"]; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/import/options.proto.ast.json b/experimental/ast/testdata/parser/import/options.proto.ast.json new file mode 100644 index 00000000..1de61ef2 --- /dev/null +++ b/experimental/ast/testdata/parser/import/options.proto.ast.json @@ -0,0 +1,196 @@ +{ + "file": { + "path": "testdata/parser/import/options.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgppbXBvcnQgImZvby5wcm90byIgWyhub3QuYWxsb3dlZCkgPSAiaGVyZSJdOwppbXBvcnQgd2VhayAid2Vhay5wcm90byIgWyhub3QuYWxsb3dlZCkgPSAiaGVyZSJdOwppbXBvcnQgcHVibGljICJwdWJsaWMucHJvdG8iIFsobm90LmFsbG93ZWQpID0gImhlcmUiXTs=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { 
"start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "import": { + "importPath": { + "literal": { + "stringValue": "foo.proto", + "span": { "start": 648, "end": 659 } + } + }, + "options": { + "entries": [ + { + "path": { + "components": [ + { + "extension": { + "components": [ + { + "ident": "not", + "componentSpan": { "start": 662, "end": 665 } + }, + { + "ident": "allowed", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 666, "end": 673 }, + "separatorSpan": { "start": 665, "end": 666 } + } + ], + "span": { "start": 662, "end": 673 } + }, + "componentSpan": { "start": 662, "end": 673 } + } + ], + "span": { "start": 661, "end": 674 } + }, + "value": { + "literal": { + "stringValue": "here", + "span": { "start": 677, "end": 683 } + } + }, + "equalsSpan": { "start": 675, "end": 676 } + } + ], + "span": { "start": 660, "end": 684 } + }, + "span": { "start": 641, "end": 685 }, + "keywordSpan": { "start": 641, "end": 647 }, + "importPathSpan": { "start": 648, "end": 659 }, + "semicolonSpan": { "start": 684, "end": 685 } + } + }, + { + "import": { + "modifier": "MODIFIER_WEAK", + "importPath": { + "literal": { + "stringValue": "weak.proto", + "span": { "start": 698, "end": 710 } + } + }, + "options": { + "entries": [ + { + "path": { + "components": [ + { + "extension": { + "components": [ + { + "ident": "not", + "componentSpan": { "start": 713, "end": 716 } + }, + { + "ident": "allowed", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 717, "end": 724 }, + "separatorSpan": { "start": 716, "end": 717 } + } + ], + "span": { "start": 713, "end": 724 } + }, + "componentSpan": { "start": 713, "end": 724 } + } + ], + "span": { "start": 712, "end": 725 } + }, + "value": { + "literal": { + "stringValue": "here", + "span": { "start": 728, "end": 734 } + } + }, + "equalsSpan": { "start": 726, "end": 727 } + } + ], + "span": { "start": 711, "end": 735 } + }, + "span": { "start": 686, "end": 736 }, + "keywordSpan": { "start": 686, "end": 692 }, + "modifierSpan": { "start": 693, "end": 697 }, + "importPathSpan": { "start": 698, "end": 710 }, + "semicolonSpan": { "start": 735, "end": 736 } + } + }, + { + "import": { + "modifier": "MODIFIER_PUBLIC", + "importPath": { + "literal": { + "stringValue": "public.proto", + "span": { "start": 751, "end": 765 } + } + }, + "options": { + "entries": [ + { + "path": { + "components": [ + { + "extension": { + "components": [ + { + "ident": "not", + "componentSpan": { "start": 768, "end": 771 } + }, + { + "ident": "allowed", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 772, "end": 779 }, + "separatorSpan": { "start": 771, "end": 772 } + } + ], + "span": { "start": 768, "end": 779 } + }, + "componentSpan": { "start": 768, "end": 779 } + } + ], + "span": { "start": 767, "end": 780 } + }, + "value": { + "literal": { + "stringValue": "here", + "span": { "start": 783, "end": 789 } + } + }, + "equalsSpan": { "start": 781, "end": 782 } + } + ], + "span": { "start": 766, "end": 790 } + }, + "span": { "start": 737, "end": 791 }, + "keywordSpan": { "start": 737, "end": 743 }, + "modifierSpan": { "start": 744, "end": 750 }, + "importPathSpan": { "start": 751, "end": 765 }, + "semicolonSpan": { "start": 790, "end": 791 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/import/options.proto.lex.tsv 
b/experimental/ast/testdata/parser/import/options.proto.lex.tsv new file mode 100644 index 00000000..dd4cb939 --- /dev/null +++ b/experimental/ast/testdata/parser/import/options.proto.lex.tsv @@ -0,0 +1,80 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:647 19:1:0 "import" + 27:0x001b TokenSpace 647:648 19:7:6 " " + 28:0x001c TokenString 648:659 19:8:7 "foo.proto" "\"foo.proto\"" + 29:0x001d TokenSpace 659:660 19:19:18 " " + 30:0x001e TokenPunct 660:684 19:20:19 "[" + 31:0x001f TokenPunct 661:674 19:21:20 "(" + 32:0x0020 TokenIdent 662:665 19:22:21 "not" + 33:0x0021 TokenPunct 665:666 19:25:24 "." + 34:0x0022 TokenIdent 666:673 19:26:25 "allowed" + 35:0x0023 TokenPunct 661:674 19:21:20 ")" + 36:0x0024 TokenSpace 674:675 19:34:33 " " + 37:0x0025 TokenPunct 675:676 19:35:34 "=" + 38:0x0026 TokenSpace 676:677 19:36:35 " " + 39:0x0027 TokenString 677:683 19:37:36 "here" "\"here\"" + 40:0x0028 TokenPunct 660:684 19:20:19 "]" + 41:0x0029 TokenPunct 684:685 19:44:43 ";" + 42:0x002a TokenSpace 685:686 19:45:44 "\n" + 43:0x002b TokenIdent 686:692 20:1:0 "import" + 44:0x002c TokenSpace 692:693 20:7:6 " " + 45:0x002d TokenIdent 693:697 20:8:7 "weak" + 46:0x002e TokenSpace 697:698 20:12:11 " " + 47:0x002f TokenString 698:710 20:13:12 "weak.proto" "\"weak.proto\"" + 48:0x0030 TokenSpace 710:711 20:25:24 " " + 49:0x0031 TokenPunct 711:735 20:26:25 "[" + 50:0x0032 TokenPunct 712:725 20:27:26 "(" + 51:0x0033 TokenIdent 713:716 20:28:27 "not" + 52:0x0034 TokenPunct 716:717 20:31:30 "." 
+ 53:0x0035 TokenIdent 717:724 20:32:31 "allowed" + 54:0x0036 TokenPunct 712:725 20:27:26 ")" + 55:0x0037 TokenSpace 725:726 20:40:39 " " + 56:0x0038 TokenPunct 726:727 20:41:40 "=" + 57:0x0039 TokenSpace 727:728 20:42:41 " " + 58:0x003a TokenString 728:734 20:43:42 "here" "\"here\"" + 59:0x003b TokenPunct 711:735 20:26:25 "]" + 60:0x003c TokenPunct 735:736 20:50:49 ";" + 61:0x003d TokenSpace 736:737 20:51:50 "\n" + 62:0x003e TokenIdent 737:743 21:1:0 "import" + 63:0x003f TokenSpace 743:744 21:7:6 " " + 64:0x0040 TokenIdent 744:750 21:8:7 "public" + 65:0x0041 TokenSpace 750:751 21:14:13 " " + 66:0x0042 TokenString 751:765 21:15:14 "public.proto" "\"public.proto\"" + 67:0x0043 TokenSpace 765:766 21:29:28 " " + 68:0x0044 TokenPunct 766:790 21:30:29 "[" + 69:0x0045 TokenPunct 767:780 21:31:30 "(" + 70:0x0046 TokenIdent 768:771 21:32:31 "not" + 71:0x0047 TokenPunct 771:772 21:35:34 "." + 72:0x0048 TokenIdent 772:779 21:36:35 "allowed" + 73:0x0049 TokenPunct 767:780 21:31:30 ")" + 74:0x004a TokenSpace 780:781 21:44:43 " " + 75:0x004b TokenPunct 781:782 21:45:44 "=" + 76:0x004c TokenSpace 782:783 21:46:45 " " + 77:0x004d TokenString 783:789 21:47:46 "here" "\"here\"" + 78:0x004e TokenPunct 766:790 21:30:29 "]" + 79:0x004f TokenPunct 790:791 21:54:53 ";" diff --git a/experimental/ast/testdata/parser/import/options.proto.stderr b/experimental/ast/testdata/parser/import/options.proto.stderr new file mode 100644 index 00000000..99c78551 --- /dev/null +++ b/experimental/ast/testdata/parser/import/options.proto.stderr @@ -0,0 +1,43 @@ +error: unexpected string literal in import + --> testdata/parser/import/options.proto:19:8 + | +19 | import "foo.proto" [(not.allowed) = "here"]; + | ^^^^^^^^^^^ expected string literal + +error: options are not permitted on syntax declarations + --> testdata/parser/import/options.proto:19:20 + | +19 | import "foo.proto" [(not.allowed) = "here"]; + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this + +warning: weak imports are discouraged and broken in some runtimes + --> testdata/parser/import/options.proto:20:8 + | +20 | import weak "weak.proto" [(not.allowed) = "here"]; + | ^^^^ + +error: unexpected string literal in weak import + --> testdata/parser/import/options.proto:20:13 + | +20 | import weak "weak.proto" [(not.allowed) = "here"]; + | ^^^^^^^^^^^^ expected string literal + +error: options are not permitted on syntax declarations + --> testdata/parser/import/options.proto:20:26 + | +20 | import weak "weak.proto" [(not.allowed) = "here"]; + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this + +error: unexpected string literal in public import + --> testdata/parser/import/options.proto:21:15 + | +21 | import public "public.proto" [(not.allowed) = "here"]; + | ^^^^^^^^^^^^^^ expected string literal + +error: options are not permitted on syntax declarations + --> testdata/parser/import/options.proto:21:30 + | +21 | import public "public.proto" [(not.allowed) = "here"]; + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this + +encountered 6 errors and 1 warning diff --git a/experimental/ast/testdata/parser/import/repeated.proto b/experimental/ast/testdata/parser/import/repeated.proto new file mode 100644 index 00000000..4467f12e --- /dev/null +++ b/experimental/ast/testdata/parser/import/repeated.proto @@ -0,0 +1,22 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +import "foo.proto"; +import "foo.proto"; // Second + +import "foo\x2eproto"; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/import/repeated.proto.ast.json b/experimental/ast/testdata/parser/import/repeated.proto.ast.json new file mode 100644 index 00000000..6ecdbc87 --- /dev/null +++ b/experimental/ast/testdata/parser/import/repeated.proto.ast.json @@ -0,0 +1,81 @@ +{ + "file": { + "path": "testdata/parser/import/repeated.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgppbXBvcnQgImZvby5wcm90byI7CmltcG9ydCAiZm9vLnByb3RvIjsgLy8gU2Vjb25kCgppbXBvcnQgImZvb1x4MmVwcm90byI7" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "import": { + "importPath": { + "literal": { + "stringValue": "foo.proto", + "span": { "start": 648, "end": 659 } + } + }, + "span": { "start": 641, "end": 660 }, + "keywordSpan": { "start": 641, "end": 647 }, + "importPathSpan": { "start": 648, "end": 659 }, + "semicolonSpan": { "start": 659, "end": 660 } + } + }, + { + "import": { + "importPath": { + "literal": { + "stringValue": "foo.proto", + "span": { "start": 668, "end": 679 } + } + }, + "span": { "start": 661, "end": 680 }, + "keywordSpan": { "start": 661, "end": 667 }, + "importPathSpan": { "start": 668, "end": 679 }, + "semicolonSpan": { "start": 679, "end": 680 } + } + }, + { + "import": { + "importPath": { + "literal": { + "stringValue": "foo.proto", + "span": { "start": 699, "end": 713 } + } + }, + "span": { "start": 692, "end": 714 }, + "keywordSpan": { "start": 692, "end": 698 }, + "importPathSpan": { "start": 699, "end": 713 }, + "semicolonSpan": { "start": 713, "end": 714 } + } + } + ] +} diff --git 
a/experimental/ast/testdata/parser/import/repeated.proto.lex.tsv b/experimental/ast/testdata/parser/import/repeated.proto.lex.tsv new file mode 100644 index 00000000..55e6944e --- /dev/null +++ b/experimental/ast/testdata/parser/import/repeated.proto.lex.tsv @@ -0,0 +1,42 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:647 19:1:0 "import" + 27:0x001b TokenSpace 647:648 19:7:6 " " + 28:0x001c TokenString 648:659 19:8:7 "foo.proto" "\"foo.proto\"" + 29:0x001d TokenPunct 659:660 19:19:18 ";" + 30:0x001e TokenSpace 660:661 19:20:19 "\n" + 31:0x001f TokenIdent 661:667 20:1:0 "import" + 32:0x0020 TokenSpace 667:668 20:7:6 " " + 33:0x0021 TokenString 668:679 20:8:7 "foo.proto" "\"foo.proto\"" + 34:0x0022 TokenPunct 679:680 20:19:18 ";" + 35:0x0023 TokenSpace 680:681 20:20:19 " " + 36:0x0024 TokenComment 681:691 20:21:20 "// Second\n" + 37:0x0025 TokenSpace 691:692 21:1:0 "\n" + 38:0x0026 TokenIdent 692:698 22:1:0 "import" + 39:0x0027 TokenSpace 698:699 22:7:6 " " + 40:0x0028 TokenString 699:713 22:8:7 "foo.proto" "\"foo\\x2eproto\"" + 41:0x0029 TokenPunct 713:714 22:22:21 ";" diff --git a/experimental/ast/testdata/parser/import/repeated.proto.stderr b/experimental/ast/testdata/parser/import/repeated.proto.stderr new file mode 100644 index 00000000..1ba7c6c8 --- /dev/null +++ b/experimental/ast/testdata/parser/import/repeated.proto.stderr @@ -0,0 +1,43 @@ +error: unexpected string literal in import + --> testdata/parser/import/repeated.proto:19:8 + | +19 | import "foo.proto"; + | ^^^^^^^^^^^ expected string literal + +warning: "foo.proto" imported twice + --> testdata/parser/import/repeated.proto:20:1 + | +19 | import "foo.proto"; + | ------------------- previously imported here +20 | import "foo.proto"; // Second + | ^^^^^^^^^^^^^^^^^^^ help: remove this import + +error: unexpected string literal in 
import + --> testdata/parser/import/repeated.proto:20:8 + | +20 | import "foo.proto"; // Second + | ^^^^^^^^^^^ expected string literal + +warning: "foo.proto" imported twice + --> testdata/parser/import/repeated.proto:22:1 + | +19 | import "foo.proto"; + | ------------------- previously imported here +20 | import "foo.proto"; // Second +... +22 | import "foo\x2eproto"; + | ^^^^^^^^^^^^^^^^^^^^^^ help: remove this import + +warning: import path should be a single, escape-less string + --> testdata/parser/import/repeated.proto:22:8 + | +22 | import "foo\x2eproto"; + | ^^^^^^^^^^^^^^ help: change this to "foo.proto" + +error: unexpected string literal in import + --> testdata/parser/import/repeated.proto:22:8 + | +22 | import "foo\x2eproto"; + | ^^^^^^^^^^^^^^ expected string literal + +encountered 3 errors and 3 warnings diff --git a/experimental/ast/testdata/parser/import/symbol.proto b/experimental/ast/testdata/parser/import/symbol.proto new file mode 100644 index 00000000..7435995e --- /dev/null +++ b/experimental/ast/testdata/parser/import/symbol.proto @@ -0,0 +1,21 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +import my.Proto; +import weak my.Proto; +import public my.Proto; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/import/symbol.proto.ast.json b/experimental/ast/testdata/parser/import/symbol.proto.ast.json new file mode 100644 index 00000000..f338dc04 --- /dev/null +++ b/experimental/ast/testdata/parser/import/symbol.proto.ast.json @@ -0,0 +1,118 @@ +{ + "file": { + "path": "testdata/parser/import/symbol.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgppbXBvcnQgbXkuUHJvdG87CmltcG9ydCB3ZWFrIG15LlByb3RvOwppbXBvcnQgcHVibGljIG15LlByb3RvOw==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + 
"ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "import": { + "importPath": { + "path": { + "components": [ + { + "ident": "my", + "componentSpan": { "start": 648, "end": 650 } + }, + { + "ident": "Proto", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 651, "end": 656 }, + "separatorSpan": { "start": 650, "end": 651 } + } + ], + "span": { "start": 648, "end": 656 } + } + }, + "span": { "start": 641, "end": 657 }, + "keywordSpan": { "start": 641, "end": 647 }, + "importPathSpan": { "start": 648, "end": 656 }, + "semicolonSpan": { "start": 656, "end": 657 } + } + }, + { + "import": { + "modifier": "MODIFIER_WEAK", + "importPath": { + "path": { + "components": [ + { + "ident": "my", + "componentSpan": { "start": 670, "end": 672 } + }, + { + "ident": "Proto", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 673, "end": 678 }, + "separatorSpan": { "start": 672, "end": 673 } + } + ], + "span": { "start": 670, "end": 678 } + } + }, + "span": { "start": 658, "end": 679 }, + "keywordSpan": { "start": 658, "end": 664 }, + "modifierSpan": { "start": 665, "end": 669 }, + "importPathSpan": { "start": 670, "end": 678 }, + "semicolonSpan": { "start": 678, "end": 679 } + } + }, + { + "import": { + "modifier": "MODIFIER_PUBLIC", + "importPath": { + "path": { + "components": [ + { + "ident": "my", + "componentSpan": { "start": 694, "end": 696 } + }, + { + "ident": "Proto", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 697, "end": 702 }, + "separatorSpan": { "start": 696, "end": 697 } + } + ], + "span": { "start": 694, "end": 702 } + } + }, + "span": { "start": 680, "end": 703 }, + "keywordSpan": { "start": 680, "end": 686 }, + "modifierSpan": { "start": 687, "end": 693 }, + "importPathSpan": { "start": 694, "end": 702 }, + "semicolonSpan": { "start": 702, "end": 703 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/import/symbol.proto.lex.tsv b/experimental/ast/testdata/parser/import/symbol.proto.lex.tsv new file mode 100644 index 00000000..0cc05936 --- /dev/null +++ b/experimental/ast/testdata/parser/import/symbol.proto.lex.tsv @@ -0,0 +1,50 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e 
TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:647 19:1:0 "import" + 27:0x001b TokenSpace 647:648 19:7:6 " " + 28:0x001c TokenIdent 648:650 19:8:7 "my" + 29:0x001d TokenPunct 650:651 19:10:9 "." + 30:0x001e TokenIdent 651:656 19:11:10 "Proto" + 31:0x001f TokenPunct 656:657 19:16:15 ";" + 32:0x0020 TokenSpace 657:658 19:17:16 "\n" + 33:0x0021 TokenIdent 658:664 20:1:0 "import" + 34:0x0022 TokenSpace 664:665 20:7:6 " " + 35:0x0023 TokenIdent 665:669 20:8:7 "weak" + 36:0x0024 TokenSpace 669:670 20:12:11 " " + 37:0x0025 TokenIdent 670:672 20:13:12 "my" + 38:0x0026 TokenPunct 672:673 20:15:14 "." + 39:0x0027 TokenIdent 673:678 20:16:15 "Proto" + 40:0x0028 TokenPunct 678:679 20:21:20 ";" + 41:0x0029 TokenSpace 679:680 20:22:21 "\n" + 42:0x002a TokenIdent 680:686 21:1:0 "import" + 43:0x002b TokenSpace 686:687 21:7:6 " " + 44:0x002c TokenIdent 687:693 21:8:7 "public" + 45:0x002d TokenSpace 693:694 21:14:13 " " + 46:0x002e TokenIdent 694:696 21:15:14 "my" + 47:0x002f TokenPunct 696:697 21:17:16 "." + 48:0x0030 TokenIdent 697:702 21:18:17 "Proto" + 49:0x0031 TokenPunct 702:703 21:23:22 ";" diff --git a/experimental/ast/testdata/parser/import/symbol.proto.stderr b/experimental/ast/testdata/parser/import/symbol.proto.stderr new file mode 100644 index 00000000..db1321e7 --- /dev/null +++ b/experimental/ast/testdata/parser/import/symbol.proto.stderr @@ -0,0 +1,25 @@ +error: cannot import by Protobuf symbol + --> testdata/parser/import/symbol.proto:19:8 + | +19 | import my.Proto; + | ^^^^^^^^ expected a quoted filesystem path + +warning: weak imports are discouraged and broken in some runtimes + --> testdata/parser/import/symbol.proto:20:8 + | +20 | import weak my.Proto; + | ^^^^ + +error: cannot import by Protobuf symbol + --> testdata/parser/import/symbol.proto:20:13 + | +20 | import weak my.Proto; + | ^^^^^^^^ expected a quoted filesystem path + +error: cannot import by Protobuf symbol + --> testdata/parser/import/symbol.proto:21:15 + | +21 | import public my.Proto; + | ^^^^^^^^ expected a quoted filesystem path + +encountered 3 errors and 1 warning diff --git a/experimental/ast/testdata/parser/package/42.proto b/experimental/ast/testdata/parser/package/42.proto new file mode 100644 index 00000000..a7a2448f --- /dev/null +++ b/experimental/ast/testdata/parser/package/42.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package 42; diff --git a/experimental/ast/testdata/parser/package/42.proto.ast.json b/experimental/ast/testdata/parser/package/42.proto.ast.json new file mode 100644 index 00000000..dc19116a --- /dev/null +++ b/experimental/ast/testdata/parser/package/42.proto.ast.json @@ -0,0 +1,35 @@ +{ + "file": { + "path": "testdata/parser/package/42.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIDQyOwo=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "span": { "start": 626, "end": 636 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 634, "end": 636 } + } + }, + { + "empty": { + "span": { "start": 636, "end": 637 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/package/42.proto.lex.tsv b/experimental/ast/testdata/parser/package/42.proto.lex.tsv new file mode 100644 index 00000000..ff7e55ef --- /dev/null +++ b/experimental/ast/testdata/parser/package/42.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 
623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenNumber 634:636 17:9:8 42 "42" + 24:0x0018 TokenPunct 636:637 17:11:10 ";" + 25:0x0019 TokenSpace 637:638 17:12:11 "\n" diff --git a/experimental/ast/testdata/parser/package/42.proto.stderr b/experimental/ast/testdata/parser/package/42.proto.stderr new file mode 100644 index 00000000..c8f5f253 --- /dev/null +++ b/experimental/ast/testdata/parser/package/42.proto.stderr @@ -0,0 +1,7 @@ +error: missing package name + --> testdata/parser/package/42.proto:17:1 + | +17 | package 42; + | ^^^^^^^^^^ help: add a path after `package` + +encountered 1 error diff --git a/experimental/ast/testdata/parser/package/absolute.proto b/experimental/ast/testdata/parser/package/absolute.proto new file mode 100644 index 00000000..cce03470 --- /dev/null +++ b/experimental/ast/testdata/parser/package/absolute.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package .test.test2; diff --git a/experimental/ast/testdata/parser/package/absolute.proto.ast.json b/experimental/ast/testdata/parser/package/absolute.proto.ast.json new file mode 100755 index 00000000..07a15d5c --- /dev/null +++ b/experimental/ast/testdata/parser/package/absolute.proto.ast.json @@ -0,0 +1,47 @@ +{ + "file": { + "path": "testdata/parser/package/absolute.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIC50ZXN0LnRlc3QyOwo=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 635, "end": 639 }, + "separatorSpan": { "start": 634, "end": 635 } + }, + { + "ident": "test2", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 640, "end": 645 }, 
+ "separatorSpan": { "start": 639, "end": 640 } + } + ], + "span": { "start": 634, "end": 645 } + }, + "span": { "start": 626, "end": 646 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 645, "end": 646 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/package/absolute.proto.lex.tsv b/experimental/ast/testdata/parser/package/absolute.proto.lex.tsv new file mode 100755 index 00000000..9ba32e8a --- /dev/null +++ b/experimental/ast/testdata/parser/package/absolute.proto.lex.tsv @@ -0,0 +1,29 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenPunct 634:635 17:9:8 "." + 24:0x0018 TokenIdent 635:639 17:10:9 "test" + 25:0x0019 TokenPunct 639:640 17:14:13 "." + 26:0x001a TokenIdent 640:645 17:15:14 "test2" + 27:0x001b TokenPunct 645:646 17:20:19 ";" + 28:0x001c TokenSpace 646:647 17:21:20 "\n" diff --git a/experimental/ast/testdata/parser/package/absolute.proto.stderr b/experimental/ast/testdata/parser/package/absolute.proto.stderr new file mode 100755 index 00000000..4e096f76 --- /dev/null +++ b/experimental/ast/testdata/parser/package/absolute.proto.stderr @@ -0,0 +1,7 @@ +error: package names cannot be absolute paths + --> testdata/parser/package/absolute.proto:17:9 + | +17 | package .test.test2; + | ^ help: remove this dot + +encountered 1 error diff --git a/experimental/ast/testdata/parser/package/empty.proto b/experimental/ast/testdata/parser/package/empty.proto new file mode 100644 index 00000000..bf69749a --- /dev/null +++ b/experimental/ast/testdata/parser/package/empty.proto @@ -0,0 +1,15 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; diff --git a/experimental/ast/testdata/parser/package/empty.proto.ast.json b/experimental/ast/testdata/parser/package/empty.proto.ast.json new file mode 100755 index 00000000..3334afa7 --- /dev/null +++ b/experimental/ast/testdata/parser/package/empty.proto.ast.json @@ -0,0 +1,23 @@ +{ + "file": { + "path": "testdata/parser/package/empty.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7Cg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/package/empty.proto.lex.tsv b/experimental/ast/testdata/parser/package/empty.proto.lex.tsv new file mode 100755 index 00000000..a941bff3 --- /dev/null +++ b/experimental/ast/testdata/parser/package/empty.proto.lex.tsv @@ -0,0 +1,21 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 
612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:625 15:19:18 "\n" diff --git a/experimental/ast/testdata/parser/package/empty.proto.stderr b/experimental/ast/testdata/parser/package/empty.proto.stderr new file mode 100755 index 00000000..372862ee --- /dev/null +++ b/experimental/ast/testdata/parser/package/empty.proto.stderr @@ -0,0 +1,7 @@ +warning: missing package declaration + --> testdata/parser/package/empty.proto + = note: omitting the `package` keyword implies an empty package + = help: using the empty package is discouraged + = help: explicitly add `package ...;` at the top of the file, after the syntax declaration + + encountered 1 warning diff --git a/experimental/ast/testdata/parser/package/eof_after_kw.proto b/experimental/ast/testdata/parser/package/eof_after_kw.proto new file mode 100644 index 00000000..41458702 --- /dev/null +++ b/experimental/ast/testdata/parser/package/eof_after_kw.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package diff --git a/experimental/ast/testdata/parser/package/eof_after_kw.proto.ast.json b/experimental/ast/testdata/parser/package/eof_after_kw.proto.ast.json new file mode 100644 index 00000000..ac92ef5f --- /dev/null +++ b/experimental/ast/testdata/parser/package/eof_after_kw.proto.ast.json @@ -0,0 +1,29 @@ +{ + "file": { + "path": "testdata/parser/package/eof_after_kw.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlCg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "span": { "start": 626, "end": 633 }, + "keywordSpan": { "start": 626, "end": 633 } + } + } + ] +} diff --git 
a/experimental/ast/testdata/parser/package/eof_after_kw.proto.lex.tsv b/experimental/ast/testdata/parser/package/eof_after_kw.proto.lex.tsv new file mode 100644 index 00000000..5b3a2e6a --- /dev/null +++ b/experimental/ast/testdata/parser/package/eof_after_kw.proto.lex.tsv @@ -0,0 +1,23 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 "\n" diff --git a/experimental/ast/testdata/parser/package/eof_after_kw.proto.stderr b/experimental/ast/testdata/parser/package/eof_after_kw.proto.stderr new file mode 100644 index 00000000..2efb4a77 --- /dev/null +++ b/experimental/ast/testdata/parser/package/eof_after_kw.proto.stderr @@ -0,0 +1,7 @@ +error: missing package name + --> testdata/parser/package/eof_after_kw.proto:17:1 + | +17 | package + | ^^^^^^^ help: add a path after `package` + +encountered 1 error diff --git a/experimental/ast/testdata/parser/package/extension.proto b/experimental/ast/testdata/parser/package/extension.proto new file mode 100644 index 00000000..4c610285 --- /dev/null +++ b/experimental/ast/testdata/parser/package/extension.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package test.(extension.path).test; diff --git a/experimental/ast/testdata/parser/package/extension.proto.ast.json b/experimental/ast/testdata/parser/package/extension.proto.ast.json new file mode 100755 index 00000000..383d932e --- /dev/null +++ b/experimental/ast/testdata/parser/package/extension.proto.ast.json @@ -0,0 +1,65 @@ +{ + "file": { + "path": "testdata/parser/package/extension.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3QuKGV4dGVuc2lvbi5wYXRoKS50ZXN0Owo=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + }, + { + "extension": { + "components": [ + { + "ident": "extension", + "componentSpan": { "start": 640, "end": 649 } + }, + { + "ident": "path", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 650, "end": 654 }, + "separatorSpan": { "start": 649, "end": 650 } + } + ], + "span": { "start": 640, "end": 654 } + }, + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 640, "end": 654 }, + "separatorSpan": { "start": 638, "end": 639 } + }, + { + "ident": "test", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 656, "end": 660 }, + "separatorSpan": { "start": 655, "end": 656 } + } + ], + "span": { "start": 634, "end": 660 } + }, + "span": { "start": 626, "end": 661 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 660, "end": 661 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/package/extension.proto.lex.tsv b/experimental/ast/testdata/parser/package/extension.proto.lex.tsv new file mode 100755 index 00000000..638c3c8d --- /dev/null +++ b/experimental/ast/testdata/parser/package/extension.proto.lex.tsv @@ -0,0 +1,34 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 
9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 "." + 25:0x0019 TokenPunct 639:655 17:14:13 "(" + 26:0x001a TokenIdent 640:649 17:15:14 "extension" + 27:0x001b TokenPunct 649:650 17:24:23 "." + 28:0x001c TokenIdent 650:654 17:25:24 "path" + 29:0x001d TokenPunct 639:655 17:14:13 ")" + 30:0x001e TokenPunct 655:656 17:30:29 "." + 31:0x001f TokenIdent 656:660 17:31:30 "test" + 32:0x0020 TokenPunct 660:661 17:35:34 ";" + 33:0x0021 TokenSpace 661:662 17:36:35 "\n" diff --git a/experimental/ast/testdata/parser/package/extension.proto.stderr b/experimental/ast/testdata/parser/package/extension.proto.stderr new file mode 100755 index 00000000..ca988bfd --- /dev/null +++ b/experimental/ast/testdata/parser/package/extension.proto.stderr @@ -0,0 +1,7 @@ +error: package names cannot contain extension names + --> testdata/parser/package/extension.proto:17:14 + | +17 | package test.(extension.path).test; + | ^^^^^^^^^^^^^^^^ + +encountered 1 error diff --git a/experimental/ast/testdata/parser/package/host_qualified.proto b/experimental/ast/testdata/parser/package/host_qualified.proto new file mode 100644 index 00000000..2e6ce6d3 --- /dev/null +++ b/experimental/ast/testdata/parser/package/host_qualified.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package buf.build/test.test2; diff --git a/experimental/ast/testdata/parser/package/host_qualified.proto.ast.json b/experimental/ast/testdata/parser/package/host_qualified.proto.ast.json new file mode 100755 index 00000000..e103dc18 --- /dev/null +++ b/experimental/ast/testdata/parser/package/host_qualified.proto.ast.json @@ -0,0 +1,57 @@ +{ + "file": { + "path": "testdata/parser/package/host_qualified.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIGJ1Zi5idWlsZC90ZXN0LnRlc3QyOwo=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "buf", + "componentSpan": { "start": 634, "end": 637 } + }, + { + "ident": "build", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 638, "end": 643 }, + "separatorSpan": { "start": 637, "end": 638 } + }, + { + "ident": "test", + "separator": "SEPARATOR_SLASH", + "componentSpan": { "start": 644, "end": 648 }, + "separatorSpan": { "start": 643, "end": 644 } + }, + { + "ident": "test2", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 649, "end": 654 }, + "separatorSpan": { "start": 648, "end": 649 } + } + ], + "span": { "start": 634, "end": 654 } + }, + "span": { "start": 626, "end": 655 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 654, "end": 655 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/package/host_qualified.proto.lex.tsv b/experimental/ast/testdata/parser/package/host_qualified.proto.lex.tsv new file mode 100755 index 00000000..65d3962e --- /dev/null +++ b/experimental/ast/testdata/parser/package/host_qualified.proto.lex.tsv @@ -0,0 +1,32 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// 
distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:637 17:9:8 "buf" + 24:0x0018 TokenPunct 637:638 17:12:11 "." + 25:0x0019 TokenIdent 638:643 17:13:12 "build" + 26:0x001a TokenPunct 643:644 17:18:17 "/" + 27:0x001b TokenIdent 644:648 17:19:18 "test" + 28:0x001c TokenPunct 648:649 17:23:22 "." + 29:0x001d TokenIdent 649:654 17:24:23 "test2" + 30:0x001e TokenPunct 654:655 17:29:28 ";" + 31:0x001f TokenSpace 655:656 17:30:29 "\n" diff --git a/experimental/ast/testdata/parser/package/host_qualified.proto.stderr b/experimental/ast/testdata/parser/package/host_qualified.proto.stderr new file mode 100755 index 00000000..255c3b35 --- /dev/null +++ b/experimental/ast/testdata/parser/package/host_qualified.proto.stderr @@ -0,0 +1,7 @@ +error: package names cannot contain slashes + --> testdata/parser/package/host_qualified.proto:17:18 + | +17 | package buf.build/test.test2; + | ^ + +encountered 1 error diff --git a/experimental/ast/testdata/parser/package/multi.proto b/experimental/ast/testdata/parser/package/multi.proto new file mode 100644 index 00000000..7ec9cb9d --- /dev/null +++ b/experimental/ast/testdata/parser/package/multi.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package test.test2; diff --git a/experimental/ast/testdata/parser/package/multi.proto.ast.json b/experimental/ast/testdata/parser/package/multi.proto.ast.json new file mode 100755 index 00000000..fc2a4b63 --- /dev/null +++ b/experimental/ast/testdata/parser/package/multi.proto.ast.json @@ -0,0 +1,45 @@ +{ + "file": { + "path": "testdata/parser/package/multi.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3QudGVzdDI7Cg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + }, + { + "ident": "test2", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 639, "end": 644 }, + "separatorSpan": { "start": 638, "end": 639 } + } + ], + "span": { "start": 634, "end": 644 } + }, + "span": { "start": 626, "end": 645 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 644, "end": 645 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/package/multi.proto.lex.tsv b/experimental/ast/testdata/parser/package/multi.proto.lex.tsv new file mode 100755 index 00000000..c4c79662 --- /dev/null +++ b/experimental/ast/testdata/parser/package/multi.proto.lex.tsv @@ -0,0 +1,28 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d 
TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 "." + 25:0x0019 TokenIdent 639:644 17:14:13 "test2" + 26:0x001a TokenPunct 644:645 17:19:18 ";" + 27:0x001b TokenSpace 645:646 17:20:19 "\n" diff --git a/experimental/ast/testdata/parser/package/no_path.proto b/experimental/ast/testdata/parser/package/no_path.proto new file mode 100644 index 00000000..f09faf95 --- /dev/null +++ b/experimental/ast/testdata/parser/package/no_path.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/package/no_path.proto.ast.json b/experimental/ast/testdata/parser/package/no_path.proto.ast.json new file mode 100755 index 00000000..740052b7 --- /dev/null +++ b/experimental/ast/testdata/parser/package/no_path.proto.ast.json @@ -0,0 +1,30 @@ +{ + "file": { + "path": "testdata/parser/package/no_path.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlOw==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "span": { "start": 626, "end": 634 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 633, "end": 634 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/package/no_path.proto.lex.tsv b/experimental/ast/testdata/parser/package/no_path.proto.lex.tsv new file mode 100755 index 00000000..602ff3b0 --- /dev/null 
+++ b/experimental/ast/testdata/parser/package/no_path.proto.lex.tsv @@ -0,0 +1,23 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenPunct 633:634 17:8:7 ";" diff --git a/experimental/ast/testdata/parser/package/no_path.proto.stderr b/experimental/ast/testdata/parser/package/no_path.proto.stderr new file mode 100755 index 00000000..f78f5ef5 --- /dev/null +++ b/experimental/ast/testdata/parser/package/no_path.proto.stderr @@ -0,0 +1,7 @@ +error: missing package name + --> testdata/parser/package/no_path.proto:17:1 + | +17 | package; + | ^^^^^^^^ help: add a path after `package` + +encountered 1 error diff --git a/experimental/ast/testdata/parser/package/options.proto b/experimental/ast/testdata/parser/package/options.proto new file mode 100644 index 00000000..e37b9e63 --- /dev/null +++ b/experimental/ast/testdata/parser/package/options.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package test [(not.allowed) = "here"]; diff --git a/experimental/ast/testdata/parser/package/options.proto.ast.json b/experimental/ast/testdata/parser/package/options.proto.ast.json new file mode 100644 index 00000000..5b97c3f7 --- /dev/null +++ b/experimental/ast/testdata/parser/package/options.proto.ast.json @@ -0,0 +1,44 @@ +{ + "file": { + "path": "testdata/parser/package/options.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3QgWyhub3QuYWxsb3dlZCkgPSAiaGVyZSJdOwo=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 663 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 639, "end": 663 } + } + }, + { + "empty": { + "span": { "start": 663, "end": 664 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/package/options.proto.lex.tsv b/experimental/ast/testdata/parser/package/options.proto.lex.tsv new file mode 100644 index 00000000..14417221 --- /dev/null +++ b/experimental/ast/testdata/parser/package/options.proto.lex.tsv @@ -0,0 +1,38 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 
14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenSpace 638:639 17:13:12 " " + 25:0x0019 TokenPunct 639:663 17:14:13 "[" + 26:0x001a TokenPunct 640:653 17:15:14 "(" + 27:0x001b TokenIdent 641:644 17:16:15 "not" + 28:0x001c TokenPunct 644:645 17:19:18 "." + 29:0x001d TokenIdent 645:652 17:20:19 "allowed" + 30:0x001e TokenPunct 640:653 17:15:14 ")" + 31:0x001f TokenSpace 653:654 17:28:27 " " + 32:0x0020 TokenPunct 654:655 17:29:28 "=" + 33:0x0021 TokenSpace 655:656 17:30:29 " " + 34:0x0022 TokenString 656:662 17:31:30 "here" "\"here\"" + 35:0x0023 TokenPunct 639:663 17:14:13 "]" + 36:0x0024 TokenPunct 663:664 17:38:37 ";" + 37:0x0025 TokenSpace 664:665 17:39:38 "\n" diff --git a/experimental/ast/testdata/parser/package/single.proto b/experimental/ast/testdata/parser/package/single.proto new file mode 100644 index 00000000..9ff43bb3 --- /dev/null +++ b/experimental/ast/testdata/parser/package/single.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package test; diff --git a/experimental/ast/testdata/parser/package/single.proto.ast.json b/experimental/ast/testdata/parser/package/single.proto.ast.json new file mode 100755 index 00000000..248b4ca9 --- /dev/null +++ b/experimental/ast/testdata/parser/package/single.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/package/single.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7Cg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/package/single.proto.lex.tsv b/experimental/ast/testdata/parser/package/single.proto.lex.tsv new file mode 100755 index 00000000..377e1cf8 --- /dev/null +++ b/experimental/ast/testdata/parser/package/single.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 
17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:640 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/range/escapes.proto b/experimental/ast/testdata/parser/range/escapes.proto new file mode 100644 index 00000000..dac9827f --- /dev/null +++ b/experimental/ast/testdata/parser/range/escapes.proto @@ -0,0 +1,22 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +message Foo { + reserved "foo" "bar"; + reserved "foo\n", "b\x61r"; +} \ No newline at end of file diff --git a/experimental/ast/testdata/parser/range/escapes.proto.ast.json b/experimental/ast/testdata/parser/range/escapes.proto.ast.json new file mode 100644 index 00000000..37e0e8bd --- /dev/null +++ b/experimental/ast/testdata/parser/range/escapes.proto.ast.json @@ -0,0 +1,98 @@ +{ + "file": { + "path": "testdata/parser/range/escapes.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgptZXNzYWdlIEZvbyB7CiAgICByZXNlcnZlZCAiZm9vIiAiYmFyIjsKICAgIHJlc2VydmVkICJmb29cbiIsICJiXHg2MXIiOwp9" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "def": { + "kind": "KIND_MESSAGE", + "name": { + "components": [ + { + "ident": "Foo", + "componentSpan": { "start": 649, "end": 652 } + 
} + ], + "span": { "start": 649, "end": 652 } + }, + "body": { + "decls": [ + { + "range": { + "kind": "KIND_RESERVED", + "ranges": [ + { + "literal": { + "stringValue": "foobar", + "span": { "start": 668, "end": 679 } + } + } + ], + "span": { "start": 659, "end": 680 }, + "keywordSpan": { "start": 659, "end": 667 }, + "semicolonSpan": { "start": 679, "end": 680 } + } + }, + { + "range": { + "kind": "KIND_RESERVED", + "ranges": [ + { + "literal": { + "stringValue": "foo\n", + "span": { "start": 694, "end": 701 } + } + }, + { + "literal": { + "stringValue": "bar", + "span": { "start": 703, "end": 711 } + } + } + ], + "span": { "start": 685, "end": 712 }, + "keywordSpan": { "start": 685, "end": 693 }, + "semicolonSpan": { "start": 711, "end": 712 } + } + } + ], + "span": { "start": 653, "end": 714 } + }, + "span": { "start": 641, "end": 714 }, + "keywordSpan": { "start": 641, "end": 648 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/range/escapes.proto.lex.tsv b/experimental/ast/testdata/parser/range/escapes.proto.lex.tsv new file mode 100644 index 00000000..b4ad0fae --- /dev/null +++ b/experimental/ast/testdata/parser/range/escapes.proto.lex.tsv @@ -0,0 +1,48 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:648 19:1:0 "message" + 27:0x001b TokenSpace 648:649 19:8:7 " " + 28:0x001c TokenIdent 649:652 19:9:8 "Foo" + 29:0x001d TokenSpace 652:653 19:12:11 " " + 30:0x001e TokenPunct 653:714 19:13:12 "{" + 31:0x001f TokenSpace 654:659 19:14:13 "\n " + 32:0x0020 TokenIdent 659:667 20:5:4 "reserved" + 33:0x0021 TokenSpace 667:668 20:13:12 " " + 34:0x0022 TokenString 668:679 20:14:13 "foobar" "\"foo\"" + 35:0x0023 TokenSpace 673:674 20:19:18 " " + 36:0x0024 TokenString 668:679 20:14:13 "bar" "\"bar\"" + 37:0x0025 TokenPunct 679:680 20:25:24 ";" + 38:0x0026 TokenSpace 680:685 20:26:25 "\n " 
+ 39:0x0027 TokenIdent 685:693 21:5:4 "reserved" + 40:0x0028 TokenSpace 693:694 21:13:12 " " + 41:0x0029 TokenString 694:701 21:14:13 "foo\n" "\"foo\\n\"" + 42:0x002a TokenPunct 701:702 21:21:20 "," + 43:0x002b TokenSpace 702:703 21:22:21 " " + 44:0x002c TokenString 703:711 21:23:22 "bar" "\"b\\x61r\"" + 45:0x002d TokenPunct 711:712 21:31:30 ";" + 46:0x002e TokenSpace 712:713 21:32:31 "\n" + 47:0x002f TokenPunct 653:714 19:13:12 "}" diff --git a/experimental/ast/testdata/parser/range/escapes.proto.stderr b/experimental/ast/testdata/parser/range/escapes.proto.stderr new file mode 100644 index 00000000..0d7d2ea1 --- /dev/null +++ b/experimental/ast/testdata/parser/range/escapes.proto.stderr @@ -0,0 +1,19 @@ +warning: reserved field name should be a single, escape-less string + --> testdata/parser/range/escapes.proto:20:14 + | +20 | reserved "foo" "bar"; + | ^^^^^^^^^^^ help: change this to "foobar" + +error: non-ASCII identifiers are not allowed + --> testdata/parser/range/escapes.proto:21:14 + | +21 | reserved "foo\n", "b\x61r"; + | ^^^^^^^ + +warning: reserved field name should be a single, escape-less string + --> testdata/parser/range/escapes.proto:21:23 + | +21 | reserved "foo\n", "b\x61r"; + | ^^^^^^^^ help: change this to "bar" + +encountered 1 error and 2 warnings diff --git a/experimental/ast/testdata/parser/range/extension_names.proto b/experimental/ast/testdata/parser/range/extension_names.proto new file mode 100644 index 00000000..9f7868f8 --- /dev/null +++ b/experimental/ast/testdata/parser/range/extension_names.proto @@ -0,0 +1,21 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package test; + +message Foo { + extensions foo, "bar"; +} \ No newline at end of file diff --git a/experimental/ast/testdata/parser/range/extension_names.proto.ast.json b/experimental/ast/testdata/parser/range/extension_names.proto.ast.json new file mode 100644 index 00000000..e811d68c --- /dev/null +++ b/experimental/ast/testdata/parser/range/extension_names.proto.ast.json @@ -0,0 +1,87 @@ +{ + "file": { + "path": "testdata/parser/range/extension_names.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgptZXNzYWdlIEZvbyB7CiAgICBleHRlbnNpb25zIGZvbywgImJhciI7Cn0=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "def": { + "kind": "KIND_MESSAGE", + "name": { + "components": [ + { + "ident": "Foo", + "componentSpan": { "start": 649, "end": 652 } + } + ], + "span": { "start": 649, "end": 652 } + }, + "body": { + "decls": [ + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "path": { + "components": [ + { + "ident": "foo", + "componentSpan": { "start": 670, "end": 673 } + } + ], + "span": { "start": 670, "end": 673 } + } + }, + { + "literal": { + "stringValue": "bar", + "span": { "start": 675, "end": 680 } + } + } + ], + "span": { "start": 659, "end": 681 }, + "keywordSpan": { "start": 659, "end": 669 }, + "semicolonSpan": { "start": 680, "end": 681 } + } + } + ], + "span": { "start": 653, "end": 683 } + }, + "span": { "start": 641, "end": 683 }, + "keywordSpan": { "start": 641, "end": 648 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/range/extension_names.proto.lex.tsv b/experimental/ast/testdata/parser/range/extension_names.proto.lex.tsv new file mode 100644 index 00000000..e90ee228 --- /dev/null +++ b/experimental/ast/testdata/parser/range/extension_names.proto.lex.tsv @@ -0,0 +1,41 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with 
the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:648 19:1:0 "message" + 27:0x001b TokenSpace 648:649 19:8:7 " " + 28:0x001c TokenIdent 649:652 19:9:8 "Foo" + 29:0x001d TokenSpace 652:653 19:12:11 " " + 30:0x001e TokenPunct 653:683 19:13:12 "{" + 31:0x001f TokenSpace 654:659 19:14:13 "\n " + 32:0x0020 TokenIdent 659:669 20:5:4 "extensions" + 33:0x0021 TokenSpace 669:670 20:15:14 " " + 34:0x0022 TokenIdent 670:673 20:16:15 "foo" + 35:0x0023 TokenPunct 673:674 20:19:18 "," + 36:0x0024 TokenSpace 674:675 20:20:19 " " + 37:0x0025 TokenString 675:680 20:21:20 "bar" "\"bar\"" + 38:0x0026 TokenPunct 680:681 20:26:25 ";" + 39:0x0027 TokenSpace 681:682 20:27:26 "\n" + 40:0x0028 TokenPunct 653:683 19:13:12 "}" diff --git a/experimental/ast/testdata/parser/range/extension_names.proto.stderr b/experimental/ast/testdata/parser/range/extension_names.proto.stderr new file mode 100644 index 00000000..552341c9 --- /dev/null +++ b/experimental/ast/testdata/parser/range/extension_names.proto.stderr @@ -0,0 +1,17 @@ +error: mismatched types + --> testdata/parser/range/extension_names.proto:20:16 + | +20 | extensions foo, "bar"; + | ---------- ^^^ expected `int32` or `int32` range + | | + | expected due to this + +error: mismatched types + --> testdata/parser/range/extension_names.proto:20:21 + | +20 | extensions foo, "bar"; + | ---------- ^^^^^ expected `int32` or `int32` range + | | + | expected due to this + +encountered 2 errors diff --git a/experimental/ast/testdata/parser/range/invalid_exprs.proto b/experimental/ast/testdata/parser/range/invalid_exprs.proto new file mode 100644 index 00000000..5675edd1 --- /dev/null +++ b/experimental/ast/testdata/parser/range/invalid_exprs.proto @@ -0,0 +1,22 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +message Foo { + extensions {}; + reserved {}; +} \ No newline at end of file diff --git a/experimental/ast/testdata/parser/range/invalid_exprs.proto.ast.json b/experimental/ast/testdata/parser/range/invalid_exprs.proto.ast.json new file mode 100644 index 00000000..b8208fa8 --- /dev/null +++ b/experimental/ast/testdata/parser/range/invalid_exprs.proto.ast.json @@ -0,0 +1,90 @@ +{ + "file": { + "path": "testdata/parser/range/invalid_exprs.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgptZXNzYWdlIEZvbyB7CiAgICBleHRlbnNpb25zIHt9OwogICAgcmVzZXJ2ZWQge307Cn0=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "def": { + "kind": "KIND_MESSAGE", + "name": { + "components": [ + { + "ident": "Foo", + "componentSpan": { "start": 649, "end": 652 } + } + ], + "span": { "start": 649, "end": 652 } + }, + "body": { + "decls": [ + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "dict": { + "span": { "start": 670, "end": 672 } + } + } + ], + "span": { "start": 659, "end": 673 }, + "keywordSpan": { "start": 659, "end": 669 }, + "semicolonSpan": { "start": 672, "end": 673 } + } + }, + { + "range": { + "kind": "KIND_RESERVED", + "ranges": [ + { + "dict": { + "span": { "start": 687, "end": 689 } + } + } + ], + "span": { "start": 678, "end": 690 }, + "keywordSpan": { "start": 678, "end": 686 }, + "semicolonSpan": { "start": 689, "end": 690 } + } + } + ], + "span": { "start": 653, "end": 692 } + }, + "span": { "start": 641, "end": 692 }, + "keywordSpan": { "start": 641, "end": 648 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/range/invalid_exprs.proto.lex.tsv 
b/experimental/ast/testdata/parser/range/invalid_exprs.proto.lex.tsv new file mode 100644 index 00000000..224df9ca --- /dev/null +++ b/experimental/ast/testdata/parser/range/invalid_exprs.proto.lex.tsv @@ -0,0 +1,45 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:648 19:1:0 "message" + 27:0x001b TokenSpace 648:649 19:8:7 " " + 28:0x001c TokenIdent 649:652 19:9:8 "Foo" + 29:0x001d TokenSpace 652:653 19:12:11 " " + 30:0x001e TokenPunct 653:692 19:13:12 "{" + 31:0x001f TokenSpace 654:659 19:14:13 "\n " + 32:0x0020 TokenIdent 659:669 20:5:4 "extensions" + 33:0x0021 TokenSpace 669:670 20:15:14 " " + 34:0x0022 TokenPunct 670:672 20:16:15 "{" + 35:0x0023 TokenPunct 670:672 20:16:15 "}" + 36:0x0024 TokenPunct 672:673 20:18:17 ";" + 37:0x0025 TokenSpace 673:678 20:19:18 "\n " + 38:0x0026 TokenIdent 678:686 21:5:4 "reserved" + 39:0x0027 TokenSpace 686:687 21:13:12 " " + 40:0x0028 TokenPunct 687:689 21:14:13 "{" + 41:0x0029 TokenPunct 687:689 21:14:13 "}" + 42:0x002a TokenPunct 689:690 21:16:15 ";" + 43:0x002b TokenSpace 690:691 21:17:16 "\n" + 44:0x002c TokenPunct 653:692 19:13:12 "}" diff --git a/experimental/ast/testdata/parser/range/invalid_exprs.proto.stderr b/experimental/ast/testdata/parser/range/invalid_exprs.proto.stderr new file mode 100644 index 00000000..5ff1aeca --- /dev/null +++ b/experimental/ast/testdata/parser/range/invalid_exprs.proto.stderr @@ -0,0 +1,17 @@ +error: mismatched types + --> testdata/parser/range/invalid_exprs.proto:20:16 + | +20 | extensions {}; + | ---------- ^^ expected `int32` or `int32` range + | | + | expected due to this + +error: mismatched types + --> testdata/parser/range/invalid_exprs.proto:21:14 + | +21 | reserved {}; + | -------- ^^ expected `int32`, `int32` range, `string`, or identifier + | | + | expected due to this + +encountered 2 errors diff --git 
a/experimental/ast/testdata/parser/range/ok.proto b/experimental/ast/testdata/parser/range/ok.proto new file mode 100644 index 00000000..6ce5e087 --- /dev/null +++ b/experimental/ast/testdata/parser/range/ok.proto @@ -0,0 +1,39 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package test; + +message Foo { + extensions 1; + extensions 1 to 2; + extensions -5 to 0x20; + extensions 0 to max; + extensions 1, 2, 3, 4 to 5, 6; + + reserved 1, "foo"; + reserved 2, 3, 5 to 7, foo, "bar"; +} + +enum Foo { + extensions 1; + extensions 1 to 2; + extensions -5 to 0x20; + extensions 0 to max; + extensions 1, 2, 3, 4 to 5, 6; + + reserved 1, "foo"; + reserved 2, 3, 5 to 7, foo, "bar"; +} \ No newline at end of file diff --git a/experimental/ast/testdata/parser/range/ok.proto.ast.json b/experimental/ast/testdata/parser/range/ok.proto.ast.json new file mode 100644 index 00000000..33c9b49b --- /dev/null +++ b/experimental/ast/testdata/parser/range/ok.proto.ast.json @@ -0,0 +1,567 @@ +{ + "file": { + "path": "testdata/parser/range/ok.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgptZXNzYWdlIEZvbyB7CiAgICBleHRlbnNpb25zIDE7CiAgICBleHRlbnNpb25zIDEgdG8gMjsKICAgIGV4dGVuc2lvbnMgLTUgdG8gMHgyMDsKICAgIGV4dGVuc2lvbnMgMCB0byBtYXg7CiAgICBleHRlbnNpb25zIDEsIDIsIDMsIDQgdG8gNSwgNjsKICAgIAogICAgcmVzZXJ2ZWQgMSwgImZvbyI7CiAgICByZXNlcnZlZCAyLCAzLCA1IHRvIDcsIGZvbywgImJhciI7Cn0KCmVudW0gRm9vIHsKICAgIGV4dGVuc2lvbnMgMTsKICAgIGV4dGVuc2lvbnMgMSB0byAyOwogICAgZXh0ZW5zaW9ucyAtNSB0byAweDIwOwogICAgZXh0ZW5zaW9ucyAwIHRvIG1heDsKICAgIGV4dGVuc2lvbnMgMSwgMiwgMywgNCB0byA1LCA2OwogICAgCiAgICByZXNlcnZlZCAxLCAiZm9vIjsKICAgIHJlc2VydmVkIDIsIDMsIDUgdG8gNywgZm9vLCAiYmFyIjsKfQ==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": 
{ "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "def": { + "kind": "KIND_MESSAGE", + "name": { + "components": [ + { + "ident": "Foo", + "componentSpan": { "start": 649, "end": 652 } + } + ], + "span": { "start": 649, "end": 652 } + }, + "body": { + "decls": [ + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "literal": { + "intValue": "1", + "span": { "start": 670, "end": 671 } + } + } + ], + "span": { "start": 659, "end": 672 }, + "keywordSpan": { "start": 659, "end": 669 }, + "semicolonSpan": { "start": 671, "end": 672 } + } + }, + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "range": { + "start": { + "literal": { + "intValue": "1", + "span": { "start": 688, "end": 689 } + } + }, + "end": { + "literal": { + "intValue": "2", + "span": { "start": 693, "end": 694 } + } + }, + "span": { "start": 688, "end": 694 }, + "toSpan": { "start": 690, "end": 692 } + } + } + ], + "span": { "start": 677, "end": 695 }, + "keywordSpan": { "start": 677, "end": 687 }, + "semicolonSpan": { "start": 694, "end": 695 } + } + }, + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "range": { + "start": { + "prefixed": { + "prefix": "PREFIX_MINUS", + "expr": { + "literal": { + "intValue": "5", + "span": { "start": 712, "end": 713 } + } + }, + "span": { "start": 711, "end": 713 }, + "prefixSpan": { "start": 711, "end": 712 } + } + }, + "end": { + "literal": { + "intValue": "32", + "span": { "start": 717, "end": 721 } + } + }, + "span": { "start": 711, "end": 721 }, + "toSpan": { "start": 714, "end": 716 } + } + } + ], + "span": { "start": 700, "end": 722 }, + "keywordSpan": { "start": 700, "end": 710 }, + "semicolonSpan": { "start": 721, "end": 722 } + } + }, + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "range": { + "start": { + "literal": { + "intValue": "0", + "span": { "start": 738, "end": 739 } + } + }, + "end": { + "path": { + "components": [ + { + "ident": "max", + "componentSpan": { "start": 743, "end": 746 } + } + ], + "span": { "start": 743, "end": 746 } + } + }, + "span": { "start": 738, "end": 746 }, + "toSpan": { "start": 740, "end": 742 } + } + } + ], + "span": { "start": 727, "end": 747 }, + "keywordSpan": { "start": 727, "end": 737 }, + "semicolonSpan": { "start": 746, "end": 747 } + } + }, + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "literal": { + "intValue": "1", + "span": { "start": 763, "end": 764 } + } + }, + { + "literal": { + "intValue": "2", + "span": { "start": 766, "end": 767 } + } + }, + { + "literal": { + "intValue": "3", + "span": { "start": 769, "end": 770 } + } + }, + { + "range": { + "start": { + "literal": { + "intValue": "4", + "span": { "start": 772, "end": 773 } + } + }, + "end": { + "literal": { + "intValue": "5", + "span": { "start": 777, "end": 778 } + } + }, + "span": { "start": 772, "end": 778 }, + "toSpan": { "start": 774, "end": 776 } + } + }, + { + "literal": { + "intValue": "6", + "span": { "start": 780, "end": 781 } + } + } + ], + "span": { "start": 752, "end": 782 }, + "keywordSpan": { "start": 752, "end": 762 }, + "semicolonSpan": { "start": 781, "end": 782 } + } + }, + { + "range": { + "kind": "KIND_RESERVED", + "ranges": [ + { + "literal": { + "intValue": "1", + "span": { "start": 801, "end": 802 } + } + }, + { + "literal": { + "stringValue": "foo", + "span": { "start": 804, "end": 809 } + } + } + ], + "span": { "start": 792, "end": 810 }, + 
"keywordSpan": { "start": 792, "end": 800 }, + "semicolonSpan": { "start": 809, "end": 810 } + } + }, + { + "range": { + "kind": "KIND_RESERVED", + "ranges": [ + { + "literal": { + "intValue": "2", + "span": { "start": 824, "end": 825 } + } + }, + { + "literal": { + "intValue": "3", + "span": { "start": 827, "end": 828 } + } + }, + { + "range": { + "start": { + "literal": { + "intValue": "5", + "span": { "start": 830, "end": 831 } + } + }, + "end": { + "literal": { + "intValue": "7", + "span": { "start": 835, "end": 836 } + } + }, + "span": { "start": 830, "end": 836 }, + "toSpan": { "start": 832, "end": 834 } + } + }, + { + "path": { + "components": [ + { + "ident": "foo", + "componentSpan": { "start": 838, "end": 841 } + } + ], + "span": { "start": 838, "end": 841 } + } + }, + { + "literal": { + "stringValue": "bar", + "span": { "start": 843, "end": 848 } + } + } + ], + "span": { "start": 815, "end": 849 }, + "keywordSpan": { "start": 815, "end": 823 }, + "semicolonSpan": { "start": 848, "end": 849 } + } + } + ], + "span": { "start": 653, "end": 851 } + }, + "span": { "start": 641, "end": 851 }, + "keywordSpan": { "start": 641, "end": 648 } + } + }, + { + "def": { + "kind": "KIND_ENUM", + "name": { + "components": [ + { + "ident": "Foo", + "componentSpan": { "start": 858, "end": 861 } + } + ], + "span": { "start": 858, "end": 861 } + }, + "body": { + "decls": [ + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "literal": { + "intValue": "1", + "span": { "start": 879, "end": 880 } + } + } + ], + "span": { "start": 868, "end": 881 }, + "keywordSpan": { "start": 868, "end": 878 }, + "semicolonSpan": { "start": 880, "end": 881 } + } + }, + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "range": { + "start": { + "literal": { + "intValue": "1", + "span": { "start": 897, "end": 898 } + } + }, + "end": { + "literal": { + "intValue": "2", + "span": { "start": 902, "end": 903 } + } + }, + "span": { "start": 897, "end": 903 }, + "toSpan": { "start": 899, "end": 901 } + } + } + ], + "span": { "start": 886, "end": 904 }, + "keywordSpan": { "start": 886, "end": 896 }, + "semicolonSpan": { "start": 903, "end": 904 } + } + }, + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "range": { + "start": { + "prefixed": { + "prefix": "PREFIX_MINUS", + "expr": { + "literal": { + "intValue": "5", + "span": { "start": 921, "end": 922 } + } + }, + "span": { "start": 920, "end": 922 }, + "prefixSpan": { "start": 920, "end": 921 } + } + }, + "end": { + "literal": { + "intValue": "32", + "span": { "start": 926, "end": 930 } + } + }, + "span": { "start": 920, "end": 930 }, + "toSpan": { "start": 923, "end": 925 } + } + } + ], + "span": { "start": 909, "end": 931 }, + "keywordSpan": { "start": 909, "end": 919 }, + "semicolonSpan": { "start": 930, "end": 931 } + } + }, + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "range": { + "start": { + "literal": { + "intValue": "0", + "span": { "start": 947, "end": 948 } + } + }, + "end": { + "path": { + "components": [ + { + "ident": "max", + "componentSpan": { "start": 952, "end": 955 } + } + ], + "span": { "start": 952, "end": 955 } + } + }, + "span": { "start": 947, "end": 955 }, + "toSpan": { "start": 949, "end": 951 } + } + } + ], + "span": { "start": 936, "end": 956 }, + "keywordSpan": { "start": 936, "end": 946 }, + "semicolonSpan": { "start": 955, "end": 956 } + } + }, + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "literal": { + "intValue": "1", + "span": { "start": 972, "end": 973 } + 
} + }, + { + "literal": { + "intValue": "2", + "span": { "start": 975, "end": 976 } + } + }, + { + "literal": { + "intValue": "3", + "span": { "start": 978, "end": 979 } + } + }, + { + "range": { + "start": { + "literal": { + "intValue": "4", + "span": { "start": 981, "end": 982 } + } + }, + "end": { + "literal": { + "intValue": "5", + "span": { "start": 986, "end": 987 } + } + }, + "span": { "start": 981, "end": 987 }, + "toSpan": { "start": 983, "end": 985 } + } + }, + { + "literal": { + "intValue": "6", + "span": { "start": 989, "end": 990 } + } + } + ], + "span": { "start": 961, "end": 991 }, + "keywordSpan": { "start": 961, "end": 971 }, + "semicolonSpan": { "start": 990, "end": 991 } + } + }, + { + "range": { + "kind": "KIND_RESERVED", + "ranges": [ + { + "literal": { + "intValue": "1", + "span": { "start": 1010, "end": 1011 } + } + }, + { + "literal": { + "stringValue": "foo", + "span": { "start": 1013, "end": 1018 } + } + } + ], + "span": { "start": 1001, "end": 1019 }, + "keywordSpan": { "start": 1001, "end": 1009 }, + "semicolonSpan": { "start": 1018, "end": 1019 } + } + }, + { + "range": { + "kind": "KIND_RESERVED", + "ranges": [ + { + "literal": { + "intValue": "2", + "span": { "start": 1033, "end": 1034 } + } + }, + { + "literal": { + "intValue": "3", + "span": { "start": 1036, "end": 1037 } + } + }, + { + "range": { + "start": { + "literal": { + "intValue": "5", + "span": { "start": 1039, "end": 1040 } + } + }, + "end": { + "literal": { + "intValue": "7", + "span": { "start": 1044, "end": 1045 } + } + }, + "span": { "start": 1039, "end": 1045 }, + "toSpan": { "start": 1041, "end": 1043 } + } + }, + { + "path": { + "components": [ + { + "ident": "foo", + "componentSpan": { "start": 1047, "end": 1050 } + } + ], + "span": { "start": 1047, "end": 1050 } + } + }, + { + "literal": { + "stringValue": "bar", + "span": { "start": 1052, "end": 1057 } + } + } + ], + "span": { "start": 1024, "end": 1058 }, + "keywordSpan": { "start": 1024, "end": 1032 }, + "semicolonSpan": { "start": 1057, "end": 1058 } + } + } + ], + "span": { "start": 862, "end": 1060 } + }, + "span": { "start": 853, "end": 1060 }, + "keywordSpan": { "start": 853, "end": 857 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/range/ok.proto.lex.tsv b/experimental/ast/testdata/parser/range/ok.proto.lex.tsv new file mode 100644 index 00000000..177ad26f --- /dev/null +++ b/experimental/ast/testdata/parser/range/ok.proto.lex.tsv @@ -0,0 +1,207 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c 
TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:648 19:1:0 "message" + 27:0x001b TokenSpace 648:649 19:8:7 " " + 28:0x001c TokenIdent 649:652 19:9:8 "Foo" + 29:0x001d TokenSpace 652:653 19:12:11 " " + 30:0x001e TokenPunct 653:851 19:13:12 "{" + 31:0x001f TokenSpace 654:659 19:14:13 "\n " + 32:0x0020 TokenIdent 659:669 20:5:4 "extensions" + 33:0x0021 TokenSpace 669:670 20:15:14 " " + 34:0x0022 TokenNumber 670:671 20:16:15 1 "1" + 35:0x0023 TokenPunct 671:672 20:17:16 ";" + 36:0x0024 TokenSpace 672:677 20:18:17 "\n " + 37:0x0025 TokenIdent 677:687 21:5:4 "extensions" + 38:0x0026 TokenSpace 687:688 21:15:14 " " + 39:0x0027 TokenNumber 688:689 21:16:15 1 "1" + 40:0x0028 TokenSpace 689:690 21:17:16 " " + 41:0x0029 TokenIdent 690:692 21:18:17 "to" + 42:0x002a TokenSpace 692:693 21:20:19 " " + 43:0x002b TokenNumber 693:694 21:21:20 2 "2" + 44:0x002c TokenPunct 694:695 21:22:21 ";" + 45:0x002d TokenSpace 695:700 21:23:22 "\n " + 46:0x002e TokenIdent 700:710 22:5:4 "extensions" + 47:0x002f TokenSpace 710:711 22:15:14 " " + 48:0x0030 TokenPunct 711:712 22:16:15 "-" + 49:0x0031 TokenNumber 712:713 22:17:16 5 "5" + 50:0x0032 TokenSpace 713:714 22:18:17 " " + 51:0x0033 TokenIdent 714:716 22:19:18 "to" + 52:0x0034 TokenSpace 716:717 22:21:20 " " + 53:0x0035 TokenNumber 717:721 22:22:21 32 "0x20" + 54:0x0036 TokenPunct 721:722 22:26:25 ";" + 55:0x0037 TokenSpace 722:727 22:27:26 "\n " + 56:0x0038 TokenIdent 727:737 23:5:4 "extensions" + 57:0x0039 TokenSpace 737:738 23:15:14 " " + 58:0x003a TokenNumber 738:739 23:16:15 0 "0" + 59:0x003b TokenSpace 739:740 23:17:16 " " + 60:0x003c TokenIdent 740:742 23:18:17 "to" + 61:0x003d TokenSpace 742:743 23:20:19 " " + 62:0x003e TokenIdent 743:746 23:21:20 "max" + 63:0x003f TokenPunct 746:747 23:24:23 ";" + 64:0x0040 TokenSpace 747:752 23:25:24 "\n " + 65:0x0041 TokenIdent 752:762 24:5:4 "extensions" + 66:0x0042 TokenSpace 762:763 24:15:14 " " + 67:0x0043 TokenNumber 763:764 24:16:15 1 "1" + 68:0x0044 TokenPunct 764:765 24:17:16 "," + 69:0x0045 TokenSpace 765:766 24:18:17 " " + 70:0x0046 TokenNumber 766:767 24:19:18 2 "2" + 71:0x0047 TokenPunct 767:768 24:20:19 "," + 72:0x0048 TokenSpace 768:769 24:21:20 " " + 73:0x0049 TokenNumber 769:770 24:22:21 3 "3" + 74:0x004a TokenPunct 770:771 24:23:22 "," + 75:0x004b TokenSpace 771:772 24:24:23 " " + 76:0x004c TokenNumber 772:773 24:25:24 4 "4" + 77:0x004d TokenSpace 773:774 24:26:25 " " + 78:0x004e TokenIdent 774:776 24:27:26 "to" + 79:0x004f TokenSpace 776:777 24:29:28 " " + 80:0x0050 TokenNumber 777:778 24:30:29 5 "5" + 81:0x0051 TokenPunct 778:779 24:31:30 "," + 82:0x0052 TokenSpace 779:780 24:32:31 " " + 83:0x0053 TokenNumber 780:781 24:33:32 6 "6" + 84:0x0054 TokenPunct 781:782 24:34:33 ";" + 85:0x0055 TokenSpace 782:792 24:35:34 "\n \n " + 86:0x0056 TokenIdent 792:800 26:5:4 "reserved" + 87:0x0057 TokenSpace 800:801 26:13:12 " " + 88:0x0058 TokenNumber 801:802 26:14:13 1 "1" + 89:0x0059 
TokenPunct 802:803 26:15:14 "," + 90:0x005a TokenSpace 803:804 26:16:15 " " + 91:0x005b TokenString 804:809 26:17:16 "foo" "\"foo\"" + 92:0x005c TokenPunct 809:810 26:22:21 ";" + 93:0x005d TokenSpace 810:815 26:23:22 "\n " + 94:0x005e TokenIdent 815:823 27:5:4 "reserved" + 95:0x005f TokenSpace 823:824 27:13:12 " " + 96:0x0060 TokenNumber 824:825 27:14:13 2 "2" + 97:0x0061 TokenPunct 825:826 27:15:14 "," + 98:0x0062 TokenSpace 826:827 27:16:15 " " + 99:0x0063 TokenNumber 827:828 27:17:16 3 "3" + 100:0x0064 TokenPunct 828:829 27:18:17 "," + 101:0x0065 TokenSpace 829:830 27:19:18 " " + 102:0x0066 TokenNumber 830:831 27:20:19 5 "5" + 103:0x0067 TokenSpace 831:832 27:21:20 " " + 104:0x0068 TokenIdent 832:834 27:22:21 "to" + 105:0x0069 TokenSpace 834:835 27:24:23 " " + 106:0x006a TokenNumber 835:836 27:25:24 7 "7" + 107:0x006b TokenPunct 836:837 27:26:25 "," + 108:0x006c TokenSpace 837:838 27:27:26 " " + 109:0x006d TokenIdent 838:841 27:28:27 "foo" + 110:0x006e TokenPunct 841:842 27:31:30 "," + 111:0x006f TokenSpace 842:843 27:32:31 " " + 112:0x0070 TokenString 843:848 27:33:32 "bar" "\"bar\"" + 113:0x0071 TokenPunct 848:849 27:38:37 ";" + 114:0x0072 TokenSpace 849:850 27:39:38 "\n" + 115:0x0073 TokenPunct 653:851 19:13:12 "}" + 116:0x0074 TokenSpace 851:853 28:2:1 "\n\n" + 117:0x0075 TokenIdent 853:857 30:1:0 "enum" + 118:0x0076 TokenSpace 857:858 30:5:4 " " + 119:0x0077 TokenIdent 858:861 30:6:5 "Foo" + 120:0x0078 TokenSpace 861:862 30:9:8 " " + 121:0x0079 TokenPunct 862:1060 30:10:9 "{" + 122:0x007a TokenSpace 863:868 30:11:10 "\n " + 123:0x007b TokenIdent 868:878 31:5:4 "extensions" + 124:0x007c TokenSpace 878:879 31:15:14 " " + 125:0x007d TokenNumber 879:880 31:16:15 1 "1" + 126:0x007e TokenPunct 880:881 31:17:16 ";" + 127:0x007f TokenSpace 881:886 31:18:17 "\n " + 128:0x0080 TokenIdent 886:896 32:5:4 "extensions" + 129:0x0081 TokenSpace 896:897 32:15:14 " " + 130:0x0082 TokenNumber 897:898 32:16:15 1 "1" + 131:0x0083 TokenSpace 898:899 32:17:16 " " + 132:0x0084 TokenIdent 899:901 32:18:17 "to" + 133:0x0085 TokenSpace 901:902 32:20:19 " " + 134:0x0086 TokenNumber 902:903 32:21:20 2 "2" + 135:0x0087 TokenPunct 903:904 32:22:21 ";" + 136:0x0088 TokenSpace 904:909 32:23:22 "\n " + 137:0x0089 TokenIdent 909:919 33:5:4 "extensions" + 138:0x008a TokenSpace 919:920 33:15:14 " " + 139:0x008b TokenPunct 920:921 33:16:15 "-" + 140:0x008c TokenNumber 921:922 33:17:16 5 "5" + 141:0x008d TokenSpace 922:923 33:18:17 " " + 142:0x008e TokenIdent 923:925 33:19:18 "to" + 143:0x008f TokenSpace 925:926 33:21:20 " " + 144:0x0090 TokenNumber 926:930 33:22:21 32 "0x20" + 145:0x0091 TokenPunct 930:931 33:26:25 ";" + 146:0x0092 TokenSpace 931:936 33:27:26 "\n " + 147:0x0093 TokenIdent 936:946 34:5:4 "extensions" + 148:0x0094 TokenSpace 946:947 34:15:14 " " + 149:0x0095 TokenNumber 947:948 34:16:15 0 "0" + 150:0x0096 TokenSpace 948:949 34:17:16 " " + 151:0x0097 TokenIdent 949:951 34:18:17 "to" + 152:0x0098 TokenSpace 951:952 34:20:19 " " + 153:0x0099 TokenIdent 952:955 34:21:20 "max" + 154:0x009a TokenPunct 955:956 34:24:23 ";" + 155:0x009b TokenSpace 956:961 34:25:24 "\n " + 156:0x009c TokenIdent 961:971 35:5:4 "extensions" + 157:0x009d TokenSpace 971:972 35:15:14 " " + 158:0x009e TokenNumber 972:973 35:16:15 1 "1" + 159:0x009f TokenPunct 973:974 35:17:16 "," + 160:0x00a0 TokenSpace 974:975 35:18:17 " " + 161:0x00a1 TokenNumber 975:976 35:19:18 2 "2" + 162:0x00a2 TokenPunct 976:977 35:20:19 "," + 163:0x00a3 TokenSpace 977:978 35:21:20 " " + 164:0x00a4 TokenNumber 978:979 35:22:21 3 "3" + 165:0x00a5 TokenPunct 
979:980 35:23:22 "," + 166:0x00a6 TokenSpace 980:981 35:24:23 " " + 167:0x00a7 TokenNumber 981:982 35:25:24 4 "4" + 168:0x00a8 TokenSpace 982:983 35:26:25 " " + 169:0x00a9 TokenIdent 983:985 35:27:26 "to" + 170:0x00aa TokenSpace 985:986 35:29:28 " " + 171:0x00ab TokenNumber 986:987 35:30:29 5 "5" + 172:0x00ac TokenPunct 987:988 35:31:30 "," + 173:0x00ad TokenSpace 988:989 35:32:31 " " + 174:0x00ae TokenNumber 989:990 35:33:32 6 "6" + 175:0x00af TokenPunct 990:991 35:34:33 ";" + 176:0x00b0 TokenSpace 991:1001 35:35:34 "\n \n " + 177:0x00b1 TokenIdent 1001:1009 37:5:4 "reserved" + 178:0x00b2 TokenSpace 1009:1010 37:13:12 " " + 179:0x00b3 TokenNumber 1010:1011 37:14:13 1 "1" + 180:0x00b4 TokenPunct 1011:1012 37:15:14 "," + 181:0x00b5 TokenSpace 1012:1013 37:16:15 " " + 182:0x00b6 TokenString 1013:1018 37:17:16 "foo" "\"foo\"" + 183:0x00b7 TokenPunct 1018:1019 37:22:21 ";" + 184:0x00b8 TokenSpace 1019:1024 37:23:22 "\n " + 185:0x00b9 TokenIdent 1024:1032 38:5:4 "reserved" + 186:0x00ba TokenSpace 1032:1033 38:13:12 " " + 187:0x00bb TokenNumber 1033:1034 38:14:13 2 "2" + 188:0x00bc TokenPunct 1034:1035 38:15:14 "," + 189:0x00bd TokenSpace 1035:1036 38:16:15 " " + 190:0x00be TokenNumber 1036:1037 38:17:16 3 "3" + 191:0x00bf TokenPunct 1037:1038 38:18:17 "," + 192:0x00c0 TokenSpace 1038:1039 38:19:18 " " + 193:0x00c1 TokenNumber 1039:1040 38:20:19 5 "5" + 194:0x00c2 TokenSpace 1040:1041 38:21:20 " " + 195:0x00c3 TokenIdent 1041:1043 38:22:21 "to" + 196:0x00c4 TokenSpace 1043:1044 38:24:23 " " + 197:0x00c5 TokenNumber 1044:1045 38:25:24 7 "7" + 198:0x00c6 TokenPunct 1045:1046 38:26:25 "," + 199:0x00c7 TokenSpace 1046:1047 38:27:26 " " + 200:0x00c8 TokenIdent 1047:1050 38:28:27 "foo" + 201:0x00c9 TokenPunct 1050:1051 38:31:30 "," + 202:0x00ca TokenSpace 1051:1052 38:32:31 " " + 203:0x00cb TokenString 1052:1057 38:33:32 "bar" "\"bar\"" + 204:0x00cc TokenPunct 1057:1058 38:38:37 ";" + 205:0x00cd TokenSpace 1058:1059 38:39:38 "\n" + 206:0x00ce TokenPunct 862:1060 30:10:9 "}" diff --git a/experimental/ast/testdata/parser/range/options.proto b/experimental/ast/testdata/parser/range/options.proto new file mode 100644 index 00000000..d796357d --- /dev/null +++ b/experimental/ast/testdata/parser/range/options.proto @@ -0,0 +1,22 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package test; + +message Foo { + extensions 1 [(allowed) = true]; + reserved 1 [(allowed) = false]; +} \ No newline at end of file diff --git a/experimental/ast/testdata/parser/range/options.proto.ast.json b/experimental/ast/testdata/parser/range/options.proto.ast.json new file mode 100644 index 00000000..57448703 --- /dev/null +++ b/experimental/ast/testdata/parser/range/options.proto.ast.json @@ -0,0 +1,152 @@ +{ + "file": { + "path": "testdata/parser/range/options.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7CgptZXNzYWdlIEZvbyB7CiAgICBleHRlbnNpb25zIDEgWyhhbGxvd2VkKSA9IHRydWVdOwogICAgcmVzZXJ2ZWQgMSBbKGFsbG93ZWQpID0gZmFsc2VdOwp9" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + }, + { + "def": { + "kind": "KIND_MESSAGE", + "name": { + "components": [ + { + "ident": "Foo", + "componentSpan": { "start": 649, "end": 652 } + } + ], + "span": { "start": 649, "end": 652 } + }, + "body": { + "decls": [ + { + "range": { + "kind": "KIND_EXTENSIONS", + "ranges": [ + { + "literal": { + "intValue": "1", + "span": { "start": 670, "end": 671 } + } + } + ], + "options": { + "entries": [ + { + "path": { + "components": [ + { + "extension": { + "span": { "start": 674, "end": 681 } + }, + "componentSpan": { "start": 674, "end": 681 } + } + ], + "span": { "start": 673, "end": 682 } + }, + "value": { + "path": { + "components": [ + { + "ident": "true", + "componentSpan": { "start": 685, "end": 689 } + } + ], + "span": { "start": 685, "end": 689 } + } + }, + "equalsSpan": { "start": 683, "end": 684 } + } + ], + "span": { "start": 672, "end": 690 } + }, + "span": { "start": 659, "end": 691 }, + "keywordSpan": { "start": 659, "end": 669 }, + "semicolonSpan": { "start": 690, "end": 691 } + } + }, + { + "range": { + "kind": "KIND_RESERVED", + "ranges": [ + { + "literal": { + "intValue": "1", + "span": { "start": 705, "end": 706 } + } + } + ], + "options": { + "entries": [ + { + "path": { + "components": [ + { + "extension": { + "span": { "start": 709, "end": 716 } + }, + "componentSpan": { "start": 709, "end": 716 } + } + ], + "span": { "start": 708, "end": 717 } + }, + 
"value": { + "path": { + "components": [ + { + "ident": "false", + "componentSpan": { "start": 720, "end": 725 } + } + ], + "span": { "start": 720, "end": 725 } + } + }, + "equalsSpan": { "start": 718, "end": 719 } + } + ], + "span": { "start": 707, "end": 726 } + }, + "span": { "start": 696, "end": 727 }, + "keywordSpan": { "start": 696, "end": 704 }, + "semicolonSpan": { "start": 726, "end": 727 } + } + } + ], + "span": { "start": 653, "end": 729 } + }, + "span": { "start": 641, "end": 729 }, + "keywordSpan": { "start": 641, "end": 648 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/range/options.proto.lex.tsv b/experimental/ast/testdata/parser/range/options.proto.lex.tsv new file mode 100644 index 00000000..a3d917b4 --- /dev/null +++ b/experimental/ast/testdata/parser/range/options.proto.lex.tsv @@ -0,0 +1,63 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:641 17:14:13 "\n\n" + 26:0x001a TokenIdent 641:648 19:1:0 "message" + 27:0x001b TokenSpace 648:649 19:8:7 " " + 28:0x001c TokenIdent 649:652 19:9:8 "Foo" + 29:0x001d TokenSpace 652:653 19:12:11 " " + 30:0x001e TokenPunct 653:729 19:13:12 "{" + 31:0x001f TokenSpace 654:659 19:14:13 "\n " + 32:0x0020 TokenIdent 659:669 20:5:4 "extensions" + 33:0x0021 TokenSpace 669:670 20:15:14 " " + 34:0x0022 TokenNumber 670:671 20:16:15 1 "1" + 35:0x0023 TokenSpace 671:672 20:17:16 " " + 36:0x0024 TokenPunct 672:690 20:18:17 "[" + 37:0x0025 TokenPunct 673:682 20:19:18 "(" + 38:0x0026 TokenIdent 674:681 20:20:19 "allowed" + 39:0x0027 TokenPunct 673:682 20:19:18 ")" + 40:0x0028 TokenSpace 682:683 20:28:27 " " + 41:0x0029 TokenPunct 683:684 20:29:28 "=" + 42:0x002a TokenSpace 684:685 20:30:29 " " + 43:0x002b TokenIdent 685:689 20:31:30 "true" + 44:0x002c TokenPunct 672:690 20:18:17 "]" + 45:0x002d TokenPunct 690:691 20:36:35 ";" + 46:0x002e TokenSpace 691:696 20:37:36 "\n " + 47:0x002f 
TokenIdent 696:704 21:5:4 "reserved" + 48:0x0030 TokenSpace 704:705 21:13:12 " " + 49:0x0031 TokenNumber 705:706 21:14:13 1 "1" + 50:0x0032 TokenSpace 706:707 21:15:14 " " + 51:0x0033 TokenPunct 707:726 21:16:15 "[" + 52:0x0034 TokenPunct 708:717 21:17:16 "(" + 53:0x0035 TokenIdent 709:716 21:18:17 "allowed" + 54:0x0036 TokenPunct 708:717 21:17:16 ")" + 55:0x0037 TokenSpace 717:718 21:26:25 " " + 56:0x0038 TokenPunct 718:719 21:27:26 "=" + 57:0x0039 TokenSpace 719:720 21:28:27 " " + 58:0x003a TokenIdent 720:725 21:29:28 "false" + 59:0x003b TokenPunct 707:726 21:16:15 "]" + 60:0x003c TokenPunct 726:727 21:35:34 ";" + 61:0x003d TokenSpace 727:728 21:36:35 "\n" + 62:0x003e TokenPunct 653:729 19:13:12 "}" diff --git a/experimental/ast/testdata/parser/range/options.proto.stderr b/experimental/ast/testdata/parser/range/options.proto.stderr new file mode 100644 index 00000000..ec14fc3b --- /dev/null +++ b/experimental/ast/testdata/parser/range/options.proto.stderr @@ -0,0 +1,7 @@ +error: options are not permitted on reserved ranges + --> testdata/parser/range/options.proto:21:16 + | +21 | reserved 1 [(allowed) = false]; + | ^^^^^^^^^^^^^^^^^^^ help: remove this + +encountered 1 error diff --git a/experimental/ast/testdata/parser/syntax/2023.proto b/experimental/ast/testdata/parser/syntax/2023.proto new file mode 100644 index 00000000..cf647afa --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/2023.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +edition = "2023"; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/2023.proto.ast.json b/experimental/ast/testdata/parser/syntax/2023.proto.ast.json new file mode 100755 index 00000000..1c09966d --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/2023.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/2023.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKZWRpdGlvbiA9ICIyMDIzIjsKCnBhY2thZ2UgdGVzdDsK" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_EDITION", + "value": { + "literal": { + "stringValue": "2023", + "span": { "start": 616, "end": 622 } + } + }, + "span": { "start": 606, "end": 623 }, + "keywordSpan": { "start": 606, "end": 613 }, + "equalsSpan": { "start": 614, "end": 615 }, + "semicolonSpan": { "start": 622, "end": 623 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 633, "end": 637 } + } + ], + "span": { "start": 633, "end": 637 } + }, + "span": { "start": 625, "end": 638 }, + "keywordSpan": { "start": 625, "end": 632 }, + "semicolonSpan": { "start": 637, "end": 638 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/2023.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/2023.proto.lex.tsv new file mode 100755 index 00000000..bfdd6a41 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/2023.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:613 15:1:0 "edition" + 15:0x000f TokenSpace 613:614 15:8:7 " " + 16:0x0010 TokenPunct 614:615 15:9:8 "=" + 17:0x0011 TokenSpace 615:616 
15:10:9 " " + 18:0x0012 TokenString 616:622 15:11:10 "2023" "\"2023\"" + 19:0x0013 TokenPunct 622:623 15:17:16 ";" + 20:0x0014 TokenSpace 623:625 15:18:17 "\n\n" + 21:0x0015 TokenIdent 625:632 17:1:0 "package" + 22:0x0016 TokenSpace 632:633 17:8:7 " " + 23:0x0017 TokenIdent 633:637 17:9:8 "test" + 24:0x0018 TokenPunct 637:638 17:13:12 ";" + 25:0x0019 TokenSpace 638:639 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/2024.proto b/experimental/ast/testdata/parser/syntax/2024.proto new file mode 100644 index 00000000..c72a23e5 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/2024.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +edition = "2024"; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/2024.proto.ast.json b/experimental/ast/testdata/parser/syntax/2024.proto.ast.json new file mode 100755 index 00000000..a0d525a5 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/2024.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/2024.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKZWRpdGlvbiA9ICIyMDI0IjsKCnBhY2thZ2UgdGVzdDsK" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_EDITION", + "value": { + "literal": { + "stringValue": "2024", + "span": { "start": 616, "end": 622 } + } + }, + "span": { "start": 606, "end": 623 }, + "keywordSpan": { "start": 606, "end": 613 }, + "equalsSpan": { "start": 614, "end": 615 }, + "semicolonSpan": { "start": 622, "end": 623 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 633, "end": 637 } + } + ], + "span": { "start": 633, "end": 637 } + }, + "span": { "start": 625, "end": 638 }, + "keywordSpan": { "start": 625, "end": 632 }, + "semicolonSpan": { "start": 637, "end": 638 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/2024.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/2024.proto.lex.tsv new file mode 100755 index 00000000..c3044b0a --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/2024.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 
46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:613 15:1:0 "edition" + 15:0x000f TokenSpace 613:614 15:8:7 " " + 16:0x0010 TokenPunct 614:615 15:9:8 "=" + 17:0x0011 TokenSpace 615:616 15:10:9 " " + 18:0x0012 TokenString 616:622 15:11:10 "2024" "\"2024\"" + 19:0x0013 TokenPunct 622:623 15:17:16 ";" + 20:0x0014 TokenSpace 623:625 15:18:17 "\n\n" + 21:0x0015 TokenIdent 625:632 17:1:0 "package" + 22:0x0016 TokenSpace 632:633 17:8:7 " " + 23:0x0017 TokenIdent 633:637 17:9:8 "test" + 24:0x0018 TokenPunct 637:638 17:13:12 ";" + 25:0x0019 TokenSpace 638:639 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/2024.proto.stderr b/experimental/ast/testdata/parser/syntax/2024.proto.stderr new file mode 100644 index 00000000..c4b95ee9 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/2024.proto.stderr @@ -0,0 +1,8 @@ +error: "2024" is not a valid edition + --> testdata/parser/syntax/2024.proto:15:11 + | +15 | edition = "2024"; + | ^^^^^^ + = note: protocompile only recognizes "2023" + +encountered 1 error diff --git a/experimental/ast/testdata/parser/syntax/edition_proto2.proto b/experimental/ast/testdata/parser/syntax/edition_proto2.proto new file mode 100644 index 00000000..c09c5661 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/edition_proto2.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +edition = "proto2"; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/edition_proto2.proto.ast.json b/experimental/ast/testdata/parser/syntax/edition_proto2.proto.ast.json new file mode 100755 index 00000000..87b3b4d9 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/edition_proto2.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/edition_proto2.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKZWRpdGlvbiA9ICJwcm90bzIiOwoKcGFja2FnZSB0ZXN0Owo=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_EDITION", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 616, "end": 624 } + } + }, + "span": { "start": 606, "end": 625 }, + "keywordSpan": { "start": 606, "end": 613 }, + "equalsSpan": { "start": 614, "end": 615 }, + "semicolonSpan": { "start": 624, "end": 625 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 635, "end": 639 } + } + ], + "span": { "start": 635, "end": 639 } + }, + "span": { "start": 627, "end": 640 }, + "keywordSpan": { "start": 627, "end": 634 }, + "semicolonSpan": { "start": 639, "end": 640 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/edition_proto2.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/edition_proto2.proto.lex.tsv new file mode 100755 index 00000000..a287af29 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/edition_proto2.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:613 15:1:0 "edition" + 15:0x000f TokenSpace 613:614 15:8:7 " 
" + 16:0x0010 TokenPunct 614:615 15:9:8 "=" + 17:0x0011 TokenSpace 615:616 15:10:9 " " + 18:0x0012 TokenString 616:624 15:11:10 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 624:625 15:19:18 ";" + 20:0x0014 TokenSpace 625:627 15:20:19 "\n\n" + 21:0x0015 TokenIdent 627:634 17:1:0 "package" + 22:0x0016 TokenSpace 634:635 17:8:7 " " + 23:0x0017 TokenIdent 635:639 17:9:8 "test" + 24:0x0018 TokenPunct 639:640 17:13:12 ";" + 25:0x0019 TokenSpace 640:641 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/edition_proto2.proto.stderr b/experimental/ast/testdata/parser/syntax/edition_proto2.proto.stderr new file mode 100644 index 00000000..8e6c7c87 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/edition_proto2.proto.stderr @@ -0,0 +1,8 @@ +error: "proto2" is not a valid edition + --> testdata/parser/syntax/edition_proto2.proto:15:11 + | +15 | edition = "proto2"; + | ^^^^^^^^ + = note: protocompile only recognizes "2023" + +encountered 1 error diff --git a/experimental/ast/testdata/parser/syntax/eof_after_eq.proto b/experimental/ast/testdata/parser/syntax/eof_after_eq.proto new file mode 100644 index 00000000..eda5ce74 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/eof_after_eq.proto @@ -0,0 +1,15 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = diff --git a/experimental/ast/testdata/parser/syntax/eof_after_eq.proto.ast.json b/experimental/ast/testdata/parser/syntax/eof_after_eq.proto.ast.json new file mode 100644 index 00000000..5d487d68 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/eof_after_eq.proto.ast.json @@ -0,0 +1,16 @@ +{ + "file": { + "path": "testdata/parser/syntax/eof_after_eq.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gCg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "span": { "start": 606, "end": 614 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/eof_after_eq.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/eof_after_eq.proto.lex.tsv new file mode 100644 index 00000000..a92efd59 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/eof_after_eq.proto.lex.tsv @@ -0,0 +1,18 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:616 15:9:8 " \n" diff --git a/experimental/ast/testdata/parser/syntax/eof_after_eq.proto.stderr b/experimental/ast/testdata/parser/syntax/eof_after_eq.proto.stderr new file mode 100644 index 00000000..29c284c2 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/eof_after_eq.proto.stderr @@ -0,0 +1,13 @@ +error: unexpected end-of-file in expression + --> testdata/parser/syntax/eof_after_eq.proto:15:9 + | +15 | syntax = + | ^ expected identifier, number, string, 
`.`, `-`, `(...)`, `[...]`, or `{...}` + +warning: missing package declaration + --> testdata/parser/syntax/eof_after_eq.proto + = note: omitting the `package` keyword implies an empty package + = help: using the empty package is discouraged + = help: explicitly add `package ...;` at the top of the file, after the syntax declaration + +encountered 1 error and 1 warning diff --git a/experimental/ast/testdata/parser/syntax/eof_after_kw.proto b/experimental/ast/testdata/parser/syntax/eof_after_kw.proto new file mode 100644 index 00000000..c985ca41 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/eof_after_kw.proto @@ -0,0 +1,15 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax diff --git a/experimental/ast/testdata/parser/syntax/eof_after_kw.proto.ast.json b/experimental/ast/testdata/parser/syntax/eof_after_kw.proto.ast.json new file mode 100644 index 00000000..f578577c --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/eof_after_kw.proto.ast.json @@ -0,0 +1,15 @@ +{ + "file": { + "path": "testdata/parser/syntax/eof_after_kw.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4Cg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "span": { "start": 606, "end": 612 }, + "keywordSpan": { "start": 606, "end": 612 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/eof_after_kw.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/eof_after_kw.proto.lex.tsv new file mode 100644 index 00000000..fb410e8f --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/eof_after_kw.proto.lex.tsv @@ -0,0 +1,16 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 
8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 "\n" diff --git a/experimental/ast/testdata/parser/syntax/eof_after_kw.proto.stderr b/experimental/ast/testdata/parser/syntax/eof_after_kw.proto.stderr new file mode 100644 index 00000000..21e38249 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/eof_after_kw.proto.stderr @@ -0,0 +1,13 @@ +error: unexpected end-of-file in syntax declaration + --> testdata/parser/syntax/eof_after_kw.proto:15:7 + | +15 | syntax + | ^ expected `=` + +warning: missing package declaration + --> testdata/parser/syntax/eof_after_kw.proto + = note: omitting the `package` keyword implies an empty package + = help: using the empty package is discouraged + = help: explicitly add `package ...;` at the top of the file, after the syntax declaration + +encountered 1 error and 1 warning diff --git a/experimental/ast/testdata/parser/syntax/invalid.proto b/experimental/ast/testdata/parser/syntax/invalid.proto new file mode 100644 index 00000000..7f423596 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/invalid.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = invalid; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/invalid.proto.ast.json b/experimental/ast/testdata/parser/syntax/invalid.proto.ast.json new file mode 100644 index 00000000..2bfd9e4c --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/invalid.proto.ast.json @@ -0,0 +1,44 @@ +{ + "file": { + "path": "testdata/parser/syntax/invalid.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gaW52YWxpZDsKCnBhY2thZ2UgdGVzdDsK" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "path": { + "components": [ + { + "ident": "invalid", + "componentSpan": { "start": 615, "end": 622 } + } + ], + "span": { "start": 615, "end": 622 } + } + }, + "span": { "start": 606, "end": 623 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 622, "end": 623 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 633, "end": 637 } + } + ], + "span": { "start": 633, "end": 637 } + }, + "span": { "start": 625, "end": 638 }, + "keywordSpan": { "start": 625, "end": 632 }, + "semicolonSpan": { "start": 637, "end": 638 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/invalid.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/invalid.proto.lex.tsv new file mode 100644 index 00000000..b566bd74 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/invalid.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 
612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenIdent 615:622 15:10:9 "invalid" + 19:0x0013 TokenPunct 622:623 15:17:16 ";" + 20:0x0014 TokenSpace 623:625 15:18:17 "\n\n" + 21:0x0015 TokenIdent 625:632 17:1:0 "package" + 22:0x0016 TokenSpace 632:633 17:8:7 " " + 23:0x0017 TokenIdent 633:637 17:9:8 "test" + 24:0x0018 TokenPunct 637:638 17:13:12 ";" + 25:0x0019 TokenSpace 638:639 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/invalid.proto.stderr b/experimental/ast/testdata/parser/syntax/invalid.proto.stderr new file mode 100644 index 00000000..53b66def --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/invalid.proto.stderr @@ -0,0 +1,7 @@ +error: unexpected path in syntax declaration + --> testdata/parser/syntax/invalid.proto:15:10 + | +15 | syntax = invalid; + | ^^^^^^^ expected string literal + +encountered 1 error diff --git a/experimental/ast/testdata/parser/syntax/lonely.proto b/experimental/ast/testdata/parser/syntax/lonely.proto new file mode 100644 index 00000000..6fe7ab10 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/lonely.proto @@ -0,0 +1,19 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +edition; + +syntax = ; + +package test; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/syntax/lonely.proto.ast.json b/experimental/ast/testdata/parser/syntax/lonely.proto.ast.json new file mode 100755 index 00000000..8b86f965 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/lonely.proto.ast.json @@ -0,0 +1,41 @@ +{ + "file": { + "path": "testdata/parser/syntax/lonely.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKZWRpdGlvbjsKCnN5bnRheCA9IDsKCnBhY2thZ2UgdGVzdDs=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_EDITION", + "span": { "start": 606, "end": 614 }, + "keywordSpan": { "start": 606, "end": 613 }, + "semicolonSpan": { "start": 613, "end": 614 } + } + }, + { + "syntax": { + "kind": "KIND_SYNTAX", + "span": { "start": 616, "end": 626 }, + "keywordSpan": { "start": 616, "end": 622 }, + "equalsSpan": { "start": 623, "end": 624 }, + "semicolonSpan": { "start": 625, "end": 626 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 636, "end": 640 } + } + ], + "span": { "start": 636, "end": 640 } + }, + "span": { "start": 628, "end": 641 }, + "keywordSpan": { "start": 628, "end": 635 }, + "semicolonSpan": { "start": 640, "end": 641 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/lonely.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/lonely.proto.lex.tsv new file mode 100755 index 00000000..eb379d58 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/lonely.proto.lex.tsv @@ -0,0 +1,27 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 
606:613 15:1:0 "edition" + 15:0x000f TokenPunct 613:614 15:8:7 ";" + 16:0x0010 TokenSpace 614:616 15:9:8 "\n\n" + 17:0x0011 TokenIdent 616:622 17:1:0 "syntax" + 18:0x0012 TokenSpace 622:623 17:7:6 " " + 19:0x0013 TokenPunct 623:624 17:8:7 "=" + 20:0x0014 TokenSpace 624:625 17:9:8 " " + 21:0x0015 TokenPunct 625:626 17:10:9 ";" + 22:0x0016 TokenSpace 626:628 17:11:10 "\n\n" + 23:0x0017 TokenIdent 628:635 19:1:0 "package" + 24:0x0018 TokenSpace 635:636 19:8:7 " " + 25:0x0019 TokenIdent 636:640 19:9:8 "test" + 26:0x001a TokenPunct 640:641 19:13:12 ";" diff --git a/experimental/ast/testdata/parser/syntax/lonely.proto.stderr b/experimental/ast/testdata/parser/syntax/lonely.proto.stderr new file mode 100755 index 00000000..ecf5439f --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/lonely.proto.stderr @@ -0,0 +1,16 @@ +error: unexpected `;` in edition declaration + --> testdata/parser/syntax/lonely.proto:15:8 + | +15 | edition; + | ^ expected `=` + +error: syntax declaration must be the first declaration in a file + --> testdata/parser/syntax/lonely.proto:17:1 + | +15 | edition; + | -------- previous declaration +16 | +17 | syntax = ; + | ^^^^^^^^^^ expected this to be the first declaration + +encountered 2 errors diff --git a/experimental/ast/testdata/parser/syntax/missing.proto b/experimental/ast/testdata/parser/syntax/missing.proto new file mode 100644 index 00000000..b915535a --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/missing.proto @@ -0,0 +1,15 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package test; diff --git a/experimental/ast/testdata/parser/syntax/missing.proto.ast.json b/experimental/ast/testdata/parser/syntax/missing.proto.ast.json new file mode 100755 index 00000000..a8f97261 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/missing.proto.ast.json @@ -0,0 +1,24 @@ +{ + "file": { + "path": "testdata/parser/syntax/missing.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKcGFja2FnZSB0ZXN0Owo=" + }, + "decls": [ + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 614, "end": 618 } + } + ], + "span": { "start": 614, "end": 618 } + }, + "span": { "start": 606, "end": 619 }, + "keywordSpan": { "start": 606, "end": 613 }, + "semicolonSpan": { "start": 618, "end": 619 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/missing.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/missing.proto.lex.tsv new file mode 100755 index 00000000..538741d3 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/missing.proto.lex.tsv @@ -0,0 +1,19 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:613 15:1:0 "package" + 15:0x000f TokenSpace 613:614 15:8:7 " " + 16:0x0010 TokenIdent 614:618 15:9:8 "test" + 17:0x0011 TokenPunct 618:619 15:13:12 ";" + 18:0x0012 TokenSpace 619:620 15:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/missing.proto.stderr b/experimental/ast/testdata/parser/syntax/missing.proto.stderr new file mode 100755 index 00000000..7f2ba8e4 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/missing.proto.stderr @@ -0,0 +1,6 @@ +warning: missing 
syntax declaration + --> testdata/parser/syntax/missing.proto + = note: omitting the `syntax` keyword implies "proto2" by default + = help: explicitly add `syntax = "proto2";` at the top of the file + + encountered 1 warning diff --git a/experimental/ast/testdata/parser/syntax/not_first.proto b/experimental/ast/testdata/parser/syntax/not_first.proto new file mode 100644 index 00000000..8ea57192 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/not_first.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package test; + +syntax = "proto2"; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/syntax/not_first.proto.ast.json b/experimental/ast/testdata/parser/syntax/not_first.proto.ast.json new file mode 100755 index 00000000..d6fcb1c3 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/not_first.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/not_first.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKcGFja2FnZSB0ZXN0OwoKc3ludGF4ID0gInByb3RvMiI7" + }, + "decls": [ + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 614, "end": 618 } + } + ], + "span": { "start": 614, "end": 618 } + }, + "span": { "start": 606, "end": 619 }, + "keywordSpan": { "start": 606, "end": 613 }, + "semicolonSpan": { "start": 618, "end": 619 } + } + }, + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 630, "end": 638 } + } + }, + "span": { "start": 621, "end": 639 }, + "keywordSpan": { "start": 621, "end": 627 }, + "equalsSpan": { "start": 628, "end": 629 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/not_first.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/not_first.proto.lex.tsv new file mode 100755 index 00000000..9384ace6 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/not_first.proto.lex.tsv @@ -0,0 +1,25 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the 
Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:613 15:1:0 "package" + 15:0x000f TokenSpace 613:614 15:8:7 " " + 16:0x0010 TokenIdent 614:618 15:9:8 "test" + 17:0x0011 TokenPunct 618:619 15:13:12 ";" + 18:0x0012 TokenSpace 619:621 15:14:13 "\n\n" + 19:0x0013 TokenIdent 621:627 17:1:0 "syntax" + 20:0x0014 TokenSpace 627:628 17:7:6 " " + 21:0x0015 TokenPunct 628:629 17:8:7 "=" + 22:0x0016 TokenSpace 629:630 17:9:8 " " + 23:0x0017 TokenString 630:638 17:10:9 "proto2" "\"proto2\"" + 24:0x0018 TokenPunct 638:639 17:18:17 ";" diff --git a/experimental/ast/testdata/parser/syntax/not_first.proto.stderr b/experimental/ast/testdata/parser/syntax/not_first.proto.stderr new file mode 100755 index 00000000..81828005 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/not_first.proto.stderr @@ -0,0 +1,10 @@ +error: syntax declaration must be the first declaration in a file + --> testdata/parser/syntax/not_first.proto:17:1 + | +15 | package test; + | ------------- previous declaration +16 | +17 | syntax = "proto2"; + | ^^^^^^^^^^^^^^^^^^ expected this to be the first declaration + +encountered 1 error diff --git a/experimental/ast/testdata/parser/syntax/options.proto b/experimental/ast/testdata/parser/syntax/options.proto new file mode 100644 index 00000000..f0ea4ff7 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/options.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2" [(not.allowed) = "here"]; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/options.proto.ast.json b/experimental/ast/testdata/parser/syntax/options.proto.ast.json new file mode 100644 index 00000000..f5fe8855 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/options.proto.ast.json @@ -0,0 +1,76 @@ +{ + "file": { + "path": "testdata/parser/syntax/options.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiIgWyhub3QuYWxsb3dlZCkgPSAiaGVyZSJdOwoKcGFja2FnZSB0ZXN0Owo=" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "options": { + "entries": [ + { + "path": { + "components": [ + { + "extension": { + "components": [ + { + "ident": "not", + "componentSpan": { "start": 626, "end": 629 } + }, + { + "ident": "allowed", + "separator": "SEPARATOR_DOT", + "componentSpan": { "start": 630, "end": 637 }, + "separatorSpan": { "start": 629, "end": 630 } + } + ], + "span": { "start": 626, "end": 637 } + }, + "componentSpan": { "start": 626, "end": 637 } + } + ], + "span": { "start": 625, "end": 638 } + }, + "value": { + "literal": { + "stringValue": "here", + "span": { "start": 641, "end": 647 } + } + }, + "equalsSpan": { "start": 639, "end": 640 } + } + ], + "span": { "start": 624, "end": 648 } + }, + "span": { "start": 606, "end": 649 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 648, "end": 649 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 659, "end": 663 } + } + ], + "span": { "start": 659, "end": 663 } + }, + "span": { "start": 651, "end": 664 }, + "keywordSpan": { "start": 651, "end": 658 }, + "semicolonSpan": { "start": 663, "end": 664 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/options.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/options.proto.lex.tsv new file mode 100644 index 00000000..af73cc44 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/options.proto.lex.tsv @@ -0,0 +1,38 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 
281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenSpace 623:624 15:18:17 " " + 20:0x0014 TokenPunct 624:648 15:19:18 "[" + 21:0x0015 TokenPunct 625:638 15:20:19 "(" + 22:0x0016 TokenIdent 626:629 15:21:20 "not" + 23:0x0017 TokenPunct 629:630 15:24:23 "." + 24:0x0018 TokenIdent 630:637 15:25:24 "allowed" + 25:0x0019 TokenPunct 625:638 15:20:19 ")" + 26:0x001a TokenSpace 638:639 15:33:32 " " + 27:0x001b TokenPunct 639:640 15:34:33 "=" + 28:0x001c TokenSpace 640:641 15:35:34 " " + 29:0x001d TokenString 641:647 15:36:35 "here" "\"here\"" + 30:0x001e TokenPunct 624:648 15:19:18 "]" + 31:0x001f TokenPunct 648:649 15:43:42 ";" + 32:0x0020 TokenSpace 649:651 15:44:43 "\n\n" + 33:0x0021 TokenIdent 651:658 17:1:0 "package" + 34:0x0022 TokenSpace 658:659 17:8:7 " " + 35:0x0023 TokenIdent 659:663 17:9:8 "test" + 36:0x0024 TokenPunct 663:664 17:13:12 ";" + 37:0x0025 TokenSpace 664:665 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/options.proto.stderr b/experimental/ast/testdata/parser/syntax/options.proto.stderr new file mode 100644 index 00000000..c2c2f7d6 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/options.proto.stderr @@ -0,0 +1,7 @@ +error: options are not permitted on syntax declarations + --> testdata/parser/syntax/options.proto:15:19 + | +15 | syntax = "proto2" [(not.allowed) = "here"]; + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this + +encountered 1 error diff --git a/experimental/ast/testdata/parser/syntax/proto2.proto b/experimental/ast/testdata/parser/syntax/proto2.proto new file mode 100644 index 00000000..9ff43bb3 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto2"; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/proto2.proto.ast.json b/experimental/ast/testdata/parser/syntax/proto2.proto.ast.json new file mode 100755 index 00000000..071f34dd --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/proto2.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMiI7CgpwYWNrYWdlIHRlc3Q7Cg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/proto2.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/proto2.proto.lex.tsv new file mode 100755 index 00000000..377e1cf8 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 
TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto2" "\"proto2\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:640 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/proto2_escaped.proto b/experimental/ast/testdata/parser/syntax/proto2_escaped.proto new file mode 100644 index 00000000..1efa2c0d --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2_escaped.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto\x32"; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/proto2_escaped.proto.ast.json b/experimental/ast/testdata/parser/syntax/proto2_escaped.proto.ast.json new file mode 100755 index 00000000..6f2dc265 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2_escaped.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/proto2_escaped.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvXHgzMiI7CgpwYWNrYWdlIHRlc3Q7Cg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 626 } + } + }, + "span": { "start": 606, "end": 627 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 626, "end": 627 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 637, "end": 641 } + } + ], + "span": { "start": 637, "end": 641 } + }, + "span": { "start": 629, "end": 642 }, + "keywordSpan": { "start": 629, "end": 636 }, + "semicolonSpan": { "start": 641, "end": 642 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/proto2_escaped.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/proto2_escaped.proto.lex.tsv new file mode 100755 index 00000000..ff930a04 --- /dev/null +++ 
b/experimental/ast/testdata/parser/syntax/proto2_escaped.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:626 15:10:9 "proto2" "\"proto\\x32\"" + 19:0x0013 TokenPunct 626:627 15:21:20 ";" + 20:0x0014 TokenSpace 627:629 15:22:21 "\n\n" + 21:0x0015 TokenIdent 629:636 17:1:0 "package" + 22:0x0016 TokenSpace 636:637 17:8:7 " " + 23:0x0017 TokenIdent 637:641 17:9:8 "test" + 24:0x0018 TokenPunct 641:642 17:13:12 ";" + 25:0x0019 TokenSpace 642:643 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/proto2_escaped.proto.stderr b/experimental/ast/testdata/parser/syntax/proto2_escaped.proto.stderr new file mode 100644 index 00000000..eaefd996 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2_escaped.proto.stderr @@ -0,0 +1,7 @@ +warning: `syntax` value should be a single, escape-less string + --> testdata/parser/syntax/proto2_escaped.proto:15:10 + | +15 | syntax = "proto\x32"; + | ^^^^^^^^^^^ help: change this to "proto2" + + encountered 1 warning diff --git a/experimental/ast/testdata/parser/syntax/proto2_split.proto b/experimental/ast/testdata/parser/syntax/proto2_split.proto new file mode 100644 index 00000000..36f72cb8 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2_split.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto" "2"; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/proto2_split.proto.ast.json b/experimental/ast/testdata/parser/syntax/proto2_split.proto.ast.json new file mode 100755 index 00000000..90b10b65 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2_split.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/proto2_split.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvIiAiMiI7CgpwYWNrYWdlIHRlc3Q7Cg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto2", + "span": { "start": 615, "end": 626 } + } + }, + "span": { "start": 606, "end": 627 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 626, "end": 627 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 637, "end": 641 } + } + ], + "span": { "start": 637, "end": 641 } + }, + "span": { "start": 629, "end": 642 }, + "keywordSpan": { "start": 629, "end": 636 }, + "semicolonSpan": { "start": 641, "end": 642 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/proto2_split.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/proto2_split.proto.lex.tsv new file mode 100755 index 00000000..e3255c51 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2_split.proto.lex.tsv @@ -0,0 +1,28 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 
16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:626 15:10:9 "proto2" "\"proto\"" + 19:0x0013 TokenSpace 622:623 15:17:16 " " + 20:0x0014 TokenString 615:626 15:10:9 "2" "\"2\"" + 21:0x0015 TokenPunct 626:627 15:21:20 ";" + 22:0x0016 TokenSpace 627:629 15:22:21 "\n\n" + 23:0x0017 TokenIdent 629:636 17:1:0 "package" + 24:0x0018 TokenSpace 636:637 17:8:7 " " + 25:0x0019 TokenIdent 637:641 17:9:8 "test" + 26:0x001a TokenPunct 641:642 17:13:12 ";" + 27:0x001b TokenSpace 642:643 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/proto2_split.proto.stderr b/experimental/ast/testdata/parser/syntax/proto2_split.proto.stderr new file mode 100644 index 00000000..beaf614c --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto2_split.proto.stderr @@ -0,0 +1,7 @@ +warning: `syntax` value should be a single, escape-less string + --> testdata/parser/syntax/proto2_split.proto:15:10 + | +15 | syntax = "proto" "2"; + | ^^^^^^^^^^^ help: change this to "proto2" + + encountered 1 warning diff --git a/experimental/ast/testdata/parser/syntax/proto3.proto b/experimental/ast/testdata/parser/syntax/proto3.proto new file mode 100644 index 00000000..d8019fb1 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto3.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/proto3.proto.ast.json b/experimental/ast/testdata/parser/syntax/proto3.proto.ast.json new file mode 100755 index 00000000..461899ef --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto3.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/proto3.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvMyI7CgpwYWNrYWdlIHRlc3Q7Cg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto3", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/proto3.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/proto3.proto.lex.tsv new file mode 100755 index 00000000..8b8d32c9 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto3.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 
TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto3" "\"proto3\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:640 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/proto4.proto b/experimental/ast/testdata/parser/syntax/proto4.proto new file mode 100644 index 00000000..ecc3ef2c --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto4.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto4"; + +package test; diff --git a/experimental/ast/testdata/parser/syntax/proto4.proto.ast.json b/experimental/ast/testdata/parser/syntax/proto4.proto.ast.json new file mode 100755 index 00000000..bb566473 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto4.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/proto4.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gInByb3RvNCI7CgpwYWNrYWdlIHRlc3Q7Cg==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "literal": { + "stringValue": "proto4", + "span": { "start": 615, "end": 623 } + } + }, + "span": { "start": 606, "end": 624 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 623, "end": 624 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 634, "end": 638 } + } + ], + "span": { "start": 634, "end": 638 } + }, + "span": { "start": 626, "end": 639 }, + "keywordSpan": { "start": 626, "end": 633 }, + "semicolonSpan": { "start": 638, "end": 639 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/proto4.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/proto4.proto.lex.tsv new file mode 100755 index 00000000..3feafff8 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto4.proto.lex.tsv @@ -0,0 +1,26 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf 
Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 "syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenString 615:623 15:10:9 "proto4" "\"proto4\"" + 19:0x0013 TokenPunct 623:624 15:18:17 ";" + 20:0x0014 TokenSpace 624:626 15:19:18 "\n\n" + 21:0x0015 TokenIdent 626:633 17:1:0 "package" + 22:0x0016 TokenSpace 633:634 17:8:7 " " + 23:0x0017 TokenIdent 634:638 17:9:8 "test" + 24:0x0018 TokenPunct 638:639 17:13:12 ";" + 25:0x0019 TokenSpace 639:640 17:14:13 "\n" diff --git a/experimental/ast/testdata/parser/syntax/proto4.proto.stderr b/experimental/ast/testdata/parser/syntax/proto4.proto.stderr new file mode 100644 index 00000000..c33db610 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/proto4.proto.stderr @@ -0,0 +1,8 @@ +error: "proto4" is not a valid syntax + --> testdata/parser/syntax/proto4.proto:15:10 + | +15 | syntax = "proto4"; + | ^^^^^^^^ + = note: protocompile only recognizes "proto2", "proto3" + +encountered 1 error diff --git a/experimental/ast/testdata/parser/syntax/unquoted.proto b/experimental/ast/testdata/parser/syntax/unquoted.proto new file mode 100644 index 00000000..8cc1eb88 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/unquoted.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = proto2; + +package test; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/syntax/unquoted.proto.ast.json b/experimental/ast/testdata/parser/syntax/unquoted.proto.ast.json new file mode 100755 index 00000000..48652625 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/unquoted.proto.ast.json @@ -0,0 +1,44 @@ +{ + "file": { + "path": "testdata/parser/syntax/unquoted.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKc3ludGF4ID0gcHJvdG8yOwoKcGFja2FnZSB0ZXN0Ow==" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_SYNTAX", + "value": { + "path": { + "components": [ + { + "ident": "proto2", + "componentSpan": { "start": 615, "end": 621 } + } + ], + "span": { "start": 615, "end": 621 } + } + }, + "span": { "start": 606, "end": 622 }, + "keywordSpan": { "start": 606, "end": 612 }, + "equalsSpan": { "start": 613, "end": 614 }, + "semicolonSpan": { "start": 621, "end": 622 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 632, "end": 636 } + } + ], + "span": { "start": 632, "end": 636 } + }, + "span": { "start": 624, "end": 637 }, + "keywordSpan": { "start": 624, "end": 631 }, + "semicolonSpan": { "start": 636, "end": 637 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/unquoted.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/unquoted.proto.lex.tsv new file mode 100755 index 00000000..b96f51fd --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/unquoted.proto.lex.tsv @@ -0,0 +1,25 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:612 15:1:0 
"syntax" + 15:0x000f TokenSpace 612:613 15:7:6 " " + 16:0x0010 TokenPunct 613:614 15:8:7 "=" + 17:0x0011 TokenSpace 614:615 15:9:8 " " + 18:0x0012 TokenIdent 615:621 15:10:9 "proto2" + 19:0x0013 TokenPunct 621:622 15:16:15 ";" + 20:0x0014 TokenSpace 622:624 15:17:16 "\n\n" + 21:0x0015 TokenIdent 624:631 17:1:0 "package" + 22:0x0016 TokenSpace 631:632 17:8:7 " " + 23:0x0017 TokenIdent 632:636 17:9:8 "test" + 24:0x0018 TokenPunct 636:637 17:13:12 ";" diff --git a/experimental/ast/testdata/parser/syntax/unquoted.proto.stderr b/experimental/ast/testdata/parser/syntax/unquoted.proto.stderr new file mode 100755 index 00000000..59bb404f --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/unquoted.proto.stderr @@ -0,0 +1,7 @@ +error: missing quotes around syntax value + --> testdata/parser/syntax/unquoted.proto:15:10 + | +15 | syntax = proto2; + | ^^^^^^ help: wrap this in quotes + +encountered 1 error diff --git a/experimental/ast/testdata/parser/syntax/unquoted_edition.proto b/experimental/ast/testdata/parser/syntax/unquoted_edition.proto new file mode 100644 index 00000000..573227d5 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/unquoted_edition.proto @@ -0,0 +1,17 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +edition = 2023; + +package test; \ No newline at end of file diff --git a/experimental/ast/testdata/parser/syntax/unquoted_edition.proto.ast.json b/experimental/ast/testdata/parser/syntax/unquoted_edition.proto.ast.json new file mode 100755 index 00000000..91399e1e --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/unquoted_edition.proto.ast.json @@ -0,0 +1,39 @@ +{ + "file": { + "path": "testdata/parser/syntax/unquoted_edition.proto", + "text": "Ly8gQ29weXJpZ2h0IDIwMjAtMjAyNCBCdWYgVGVjaG5vbG9naWVzLCBJbmMuCi8vCi8vIExpY2Vuc2VkIHVuZGVyIHRoZSBBcGFjaGUgTGljZW5zZSwgVmVyc2lvbiAyLjAgKHRoZSAiTGljZW5zZSIpOwovLyB5b3UgbWF5IG5vdCB1c2UgdGhpcyBmaWxlIGV4Y2VwdCBpbiBjb21wbGlhbmNlIHdpdGggdGhlIExpY2Vuc2UuCi8vIFlvdSBtYXkgb2J0YWluIGEgY29weSBvZiB0aGUgTGljZW5zZSBhdAovLwovLyAgICAgIGh0dHA6Ly93d3cuYXBhY2hlLm9yZy9saWNlbnNlcy9MSUNFTlNFLTIuMAovLwovLyBVbmxlc3MgcmVxdWlyZWQgYnkgYXBwbGljYWJsZSBsYXcgb3IgYWdyZWVkIHRvIGluIHdyaXRpbmcsIHNvZnR3YXJlCi8vIGRpc3RyaWJ1dGVkIHVuZGVyIHRoZSBMaWNlbnNlIGlzIGRpc3RyaWJ1dGVkIG9uIGFuICJBUyBJUyIgQkFTSVMsCi8vIFdJVEhPVVQgV0FSUkFOVElFUyBPUiBDT05ESVRJT05TIE9GIEFOWSBLSU5ELCBlaXRoZXIgZXhwcmVzcyBvciBpbXBsaWVkLgovLyBTZWUgdGhlIExpY2Vuc2UgZm9yIHRoZSBzcGVjaWZpYyBsYW5ndWFnZSBnb3Zlcm5pbmcgcGVybWlzc2lvbnMgYW5kCi8vIGxpbWl0YXRpb25zIHVuZGVyIHRoZSBMaWNlbnNlLgoKZWRpdGlvbiA9IDIwMjM7CgpwYWNrYWdlIHRlc3Q7" + }, + "decls": [ + { + "syntax": { + "kind": "KIND_EDITION", + "value": { + "literal": { + "intValue": "2023", + "span": { "start": 616, "end": 620 } + } + }, + "span": { "start": 606, "end": 621 }, + "keywordSpan": { "start": 606, "end": 613 }, + "equalsSpan": { "start": 614, "end": 615 }, + "semicolonSpan": { "start": 620, "end": 621 } + } + }, + { + "package": { + "path": { + "components": [ + { + "ident": "test", + "componentSpan": { "start": 631, "end": 635 } + } + ], + "span": { "start": 631, "end": 635 } + }, + "span": { "start": 623, "end": 636 }, + "keywordSpan": { "start": 623, "end": 630 }, + "semicolonSpan": { "start": 635, "end": 636 } + } + } + ] +} diff --git a/experimental/ast/testdata/parser/syntax/unquoted_edition.proto.lex.tsv b/experimental/ast/testdata/parser/syntax/unquoted_edition.proto.lex.tsv new file mode 100755 index 00000000..9823203e --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/unquoted_edition.proto.lex.tsv @@ -0,0 +1,25 @@ + 0:0x0000 TokenComment 0:46 1:1:0 "// Copyright 2020-2024 Buf Technologies, Inc.\n" + 1:0x0001 TokenComment 46:49 2:1:0 "//\n" + 2:0x0002 TokenComment 49:116 3:1:0 "// Licensed under the Apache License, Version 2.0 (the \"License\");\n" + 3:0x0003 TokenComment 116:184 4:1:0 "// you may not use this file except in compliance with the License.\n" + 4:0x0004 TokenComment 184:227 5:1:0 "// You may obtain a copy of the License at\n" + 5:0x0005 TokenComment 227:230 6:1:0 "//\n" + 6:0x0006 TokenComment 230:281 7:1:0 "// http://www.apache.org/licenses/LICENSE-2.0\n" + 7:0x0007 TokenComment 281:284 8:1:0 "//\n" + 8:0x0008 TokenComment 284:355 9:1:0 "// Unless required by applicable law or agreed to in writing, software\n" + 9:0x0009 TokenComment 355:424 10:1:0 "// distributed under the License is distributed on an \"AS IS\" BASIS,\n" + 10:0x000a TokenComment 424:500 11:1:0 "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" + 11:0x000b TokenComment 500:571 12:1:0 "// See the License for the specific language governing permissions and\n" + 12:0x000c TokenComment 571:605 13:1:0 "// limitations under the License.\n" + 13:0x000d TokenSpace 605:606 14:1:0 "\n" + 14:0x000e TokenIdent 606:613 15:1:0 "edition" + 15:0x000f 
TokenSpace 613:614 15:8:7 " " + 16:0x0010 TokenPunct 614:615 15:9:8 "=" + 17:0x0011 TokenSpace 615:616 15:10:9 " " + 18:0x0012 TokenNumber 616:620 15:11:10 2023 "2023" + 19:0x0013 TokenPunct 620:621 15:15:14 ";" + 20:0x0014 TokenSpace 621:623 15:16:15 "\n\n" + 21:0x0015 TokenIdent 623:630 17:1:0 "package" + 22:0x0016 TokenSpace 630:631 17:8:7 " " + 23:0x0017 TokenIdent 631:635 17:9:8 "test" + 24:0x0018 TokenPunct 635:636 17:13:12 ";" diff --git a/experimental/ast/testdata/parser/syntax/unquoted_edition.proto.stderr b/experimental/ast/testdata/parser/syntax/unquoted_edition.proto.stderr new file mode 100644 index 00000000..07a62774 --- /dev/null +++ b/experimental/ast/testdata/parser/syntax/unquoted_edition.proto.stderr @@ -0,0 +1,7 @@ +error: missing quotes around edition value + --> testdata/parser/syntax/unquoted_edition.proto:15:11 + | +15 | edition = 2023; + | ^^^^ help: wrap this in quotes + +encountered 1 error diff --git a/experimental/ast/to_proto.go b/experimental/ast/to_proto.go new file mode 100644 index 00000000..118f35f3 --- /dev/null +++ b/experimental/ast/to_proto.go @@ -0,0 +1,397 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + "reflect" + + compilerv1 "github.com/bufbuild/protocompile/internal/gen/buf/compiler/v1" + "google.golang.org/protobuf/proto" +) + +// FileToProto converts file into a Protobuf representation, which may be serialized. +// +// Note that the ast package does not support deserialization from this proto; instead, +// you will need to reparse the text file included in the message. This is because the +// AST is much richer than what is stored in this message; the message only provides +// enough information for further semantic analysis and diagnostic generation, but not +// for pretty-printing. 
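// As an illustration only (a sketch, not prescribed by this package: it
// assumes the File was produced by this package's parser and that the
// protojson encoder from google.golang.org/protobuf/encoding/protojson is an
// acceptable output format), a caller might serialize the result like so:
//
//	msg := FileToProto(file)
//	blob, err := protojson.Marshal(msg) // or proto.Marshal for the binary wire format
//	if err != nil {
//		// handle the encoding error
//	}
//	_ = blob // e.g. write to disk or feed to other tooling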
+func FileToProto(file File) proto.Message { + return fileToProto(file) +} + +func fileToProto(file File) *compilerv1.File { + proto := &compilerv1.File{ + File: &compilerv1.Report_File{ + Path: file.Context().Path(), + Text: []byte(file.Context().Text()), + }, + } + file.Iter(func(_ int, d Decl) bool { + proto.Decls = append(proto.Decls, declToProto(d)) + return true + }) + return proto +} + +func spanToProto(s Spanner) *compilerv1.Span { + if s == nil { + return nil + } + + span := s.Span() + if span.Nil() { + return nil + } + + start, end := span.Offsets() + return &compilerv1.Span{ + Start: uint32(start), + End: uint32(end), + } +} + +func pathToProto(path Path) *compilerv1.Path { + if path.Nil() { + return nil + } + + proto := &compilerv1.Path{ + Span: spanToProto(path), + } + path.Components(func(c PathComponent) bool { + component := new(compilerv1.Path_Component) + switch c.Separator().Text() { + case ".": + component.Separator = compilerv1.Path_Component_SEPARATOR_DOT + case "/": + component.Separator = compilerv1.Path_Component_SEPARATOR_SLASH + } + component.SeparatorSpan = spanToProto(c.Separator()) + + if c.IsExtension() { + extn := c.AsExtension() + component.Component = &compilerv1.Path_Component_Extension{Extension: pathToProto(extn)} + component.ComponentSpan = spanToProto(extn) + } else { + ident := c.AsIdent() + component.Component = &compilerv1.Path_Component_Ident{Ident: ident.Name()} + component.ComponentSpan = spanToProto(ident) + } + + proto.Components = append(proto.Components, component) + return true + }) + return proto +} + +func declToProto(decl Decl) *compilerv1.Decl { + if decl == nil { + return nil + } + + switch decl := decl.(type) { + case DeclEmpty: + return &compilerv1.Decl{Decl: &compilerv1.Decl_Empty_{Empty: &compilerv1.Decl_Empty{ + Span: spanToProto(decl), + }}} + + case DeclSyntax: + var kind compilerv1.Decl_Syntax_Kind + if decl.IsSyntax() { + kind = compilerv1.Decl_Syntax_KIND_SYNTAX + } else if decl.IsEdition() { + kind = compilerv1.Decl_Syntax_KIND_EDITION + } + + return &compilerv1.Decl{Decl: &compilerv1.Decl_Syntax_{Syntax: &compilerv1.Decl_Syntax{ + Kind: kind, + Value: exprToProto(decl.Value()), + Options: optionsToProto(decl.Options()), + Span: spanToProto(decl), + KeywordSpan: spanToProto(decl.Keyword()), + EqualsSpan: spanToProto(decl.Equals()), + SemicolonSpan: spanToProto(decl.Semicolon()), + }}} + + case DeclPackage: + return &compilerv1.Decl{Decl: &compilerv1.Decl_Package_{Package: &compilerv1.Decl_Package{ + Path: pathToProto(decl.Path()), + Options: optionsToProto(decl.Options()), + Span: spanToProto(decl), + KeywordSpan: spanToProto(decl.Keyword()), + SemicolonSpan: spanToProto(decl.Semicolon()), + }}} + + case DeclImport: + var mod compilerv1.Decl_Import_Modifier + if decl.IsWeak() { + mod = compilerv1.Decl_Import_MODIFIER_WEAK + } else if decl.IsPublic() { + mod = compilerv1.Decl_Import_MODIFIER_PUBLIC + } + + return &compilerv1.Decl{Decl: &compilerv1.Decl_Import_{Import: &compilerv1.Decl_Import{ + Modifier: mod, + ImportPath: exprToProto(decl.ImportPath()), + Options: optionsToProto(decl.Options()), + Span: spanToProto(decl), + KeywordSpan: spanToProto(decl.Keyword()), + ModifierSpan: spanToProto(decl.Modifier()), + ImportPathSpan: spanToProto(decl.ImportPath()), + SemicolonSpan: spanToProto(decl.Semicolon()), + }}} + + case DeclScope: + proto := &compilerv1.Decl_Body{ + Span: spanToProto(decl), + } + decl.Iter(func(_ int, d Decl) bool { + proto.Decls = append(proto.Decls, declToProto(d)) + return true + }) + return 
&compilerv1.Decl{Decl: &compilerv1.Decl_Body_{Body: proto}} + + case DeclRange: + var kind compilerv1.Decl_Range_Kind + if decl.IsExtensions() { + kind = compilerv1.Decl_Range_KIND_EXTENSIONS + } else if decl.IsReserved() { + kind = compilerv1.Decl_Range_KIND_RESERVED + + } + + proto := &compilerv1.Decl_Range{ + Kind: kind, + Options: optionsToProto(decl.Options()), + Span: spanToProto(decl), + KeywordSpan: spanToProto(decl.Keyword()), + SemicolonSpan: spanToProto(decl.Semicolon()), + } + + decl.Iter(func(_ int, e Expr) bool { + proto.Ranges = append(proto.Ranges, exprToProto(e)) + return true + }) + + return &compilerv1.Decl{Decl: &compilerv1.Decl_Range_{Range: proto}} + + case DeclDef: + var kind compilerv1.Def_Kind + switch decl.Classify().(type) { + case DefMessage: + kind = compilerv1.Def_KIND_MESSAGE + case DefEnum: + kind = compilerv1.Def_KIND_ENUM + case DefService: + kind = compilerv1.Def_KIND_SERVICE + case DefExtend: + kind = compilerv1.Def_KIND_EXTEND + case DefField: + kind = compilerv1.Def_KIND_FIELD + case DefEnumValue: + kind = compilerv1.Def_KIND_ENUM_VALUE + case DefOneof: + kind = compilerv1.Def_KIND_ONEOF + case DefGroup: + kind = compilerv1.Def_KIND_GROUP + case DefMethod: + kind = compilerv1.Def_KIND_METHOD + case DefOption: + kind = compilerv1.Def_KIND_OPTION + } + + proto := &compilerv1.Def{ + Kind: kind, + Name: pathToProto(decl.Name()), + Value: exprToProto(decl.Value()), + Options: optionsToProto(decl.Options()), + Span: spanToProto(decl), + KeywordSpan: spanToProto(decl.Keyword()), + EqualsSpan: spanToProto(decl.Equals()), + SemicolonSpan: spanToProto(decl.Semicolon()), + } + + if kind == compilerv1.Def_KIND_FIELD || kind == compilerv1.Def_KIND_UNSPECIFIED { + proto.Type = typeToProto(decl.Type()) + } + + if signature := decl.Signature(); !signature.Nil() { + proto.Signature = &compilerv1.Def_Signature{ + Span: spanToProto(signature), + InputSpan: spanToProto(signature.Inputs()), + ReturnsSpan: spanToProto(signature.Returns()), + OutputSpan: spanToProto(signature.Outputs()), + } + + signature.Inputs().Iter(func(_ int, t Type) bool { + proto.Signature.Inputs = append(proto.Signature.Inputs, typeToProto(t)) + return true + }) + signature.Outputs().Iter(func(_ int, t Type) bool { + proto.Signature.Outputs = append(proto.Signature.Outputs, typeToProto(t)) + return true + }) + } + + if body := decl.Body(); !body.Nil() { + proto.Body = &compilerv1.Decl_Body{ + Span: spanToProto(decl.Body()), + } + body.Iter(func(_ int, d Decl) bool { + proto.Body.Decls = append(proto.Body.Decls, declToProto(d)) + return true + }) + } + + return &compilerv1.Decl{Decl: &compilerv1.Decl_Def{Def: proto}} + } + + panic(fmt.Sprint("declToProto: unknown Decl implementation:", reflect.TypeOf(decl))) +} + +func optionsToProto(options CompactOptions) *compilerv1.Options { + if options.Nil() { + return nil + } + + proto := &compilerv1.Options{ + Span: spanToProto(options), + } + + options.Iter(func(_ int, o Option) bool { + proto.Entries = append(proto.Entries, &compilerv1.Options_Entry{ + Path: pathToProto(o.Path), + Value: exprToProto(o.Value), + EqualsSpan: spanToProto(o.Equals), + }) + return true + }) + + return proto +} + +func exprToProto(expr Expr) *compilerv1.Expr { + if expr == nil { + return nil + } + + switch expr := expr.(type) { + case ExprLiteral: + proto := &compilerv1.Expr_Literal{ + Span: spanToProto(expr), + } + if v, ok := expr.Token.AsInt(); ok { + proto.Value = &compilerv1.Expr_Literal_IntValue{IntValue: v} + } else if v, ok := expr.Token.AsFloat(); ok { + proto.Value =
&compilerv1.Expr_Literal_FloatValue{FloatValue: v} + } else if v, ok := expr.Token.AsString(); ok { + proto.Value = &compilerv1.Expr_Literal_StringValue{StringValue: v} + } + return &compilerv1.Expr{Expr: &compilerv1.Expr_Literal_{Literal: proto}} + + case ExprPath: + return &compilerv1.Expr{Expr: &compilerv1.Expr_Path{Path: pathToProto(expr.Path)}} + + case ExprPrefixed: + return &compilerv1.Expr{Expr: &compilerv1.Expr_Prefixed_{Prefixed: &compilerv1.Expr_Prefixed{ + Prefix: compilerv1.Expr_Prefixed_Prefix(expr.Prefix()), + Expr: exprToProto(expr.Expr()), + Span: spanToProto(expr), + PrefixSpan: spanToProto(expr.PrefixToken()), + }}} + + case ExprRange: + start, end := expr.Bounds() + return &compilerv1.Expr{Expr: &compilerv1.Expr_Range_{Range: &compilerv1.Expr_Range{ + Start: exprToProto(start), + End: exprToProto(end), + Span: spanToProto(expr), + ToSpan: spanToProto(expr.Keyword()), + }}} + + case ExprArray: + proto := &compilerv1.Expr_Array{ + Span: spanToProto(expr), + } + expr.Iter(func(_ int, e Expr) bool { + proto.Elements = append(proto.Elements, exprToProto(e)) + return true + }) + return &compilerv1.Expr{Expr: &compilerv1.Expr_Array_{Array: proto}} + + case ExprDict: + proto := &compilerv1.Expr_Dict{ + Span: spanToProto(expr), + } + expr.Iter(func(_ int, e ExprKV) bool { + proto.Entries = append(proto.Entries, exprKVToProto(e)) + return true + }) + return &compilerv1.Expr{Expr: &compilerv1.Expr_Dict_{Dict: proto}} + + case ExprKV: + return &compilerv1.Expr{Expr: &compilerv1.Expr_Kv_{Kv: exprKVToProto(expr)}} + } + + panic(fmt.Sprint("typeToProto: unknown Expr implementation:", reflect.TypeOf(expr))) +} + +func exprKVToProto(expr ExprKV) *compilerv1.Expr_Kv { + if expr.Nil() { + return nil + } + + return &compilerv1.Expr_Kv{ + Key: exprToProto(expr.Key()), + Value: exprToProto(expr.Value()), + Span: spanToProto(expr), + ColonSpan: spanToProto(expr.Colon()), + } +} + +func typeToProto(ty Type) *compilerv1.Type { + if ty == nil { + return nil + } + + switch ty := ty.(type) { + case TypePath: + return &compilerv1.Type{Type: &compilerv1.Type_Path{Path: pathToProto(ty.Path)}} + + case TypePrefixed: + return &compilerv1.Type{Type: &compilerv1.Type_Prefixed_{Prefixed: &compilerv1.Type_Prefixed{ + Prefix: compilerv1.Type_Prefixed_Prefix(ty.Prefix()), + Type: typeToProto(ty.Type()), + Span: spanToProto(ty), + PrefixSpan: spanToProto(ty.PrefixToken()), + }}} + + case TypeGeneric: + generic := &compilerv1.Type_Generic{ + Path: pathToProto(ty.Path()), + Span: spanToProto(ty), + BracketSpan: spanToProto(ty.Args()), + } + ty.Args().Iter(func(_ int, t Type) bool { + generic.Args = append(generic.Args, typeToProto(t)) + return true + }) + return &compilerv1.Type{Type: &compilerv1.Type_Generic_{Generic: generic}} + } + + panic(fmt.Sprint("typeToProto: unknown Type implementation:", reflect.TypeOf(ty))) +} diff --git a/experimental/ast/token.go b/experimental/ast/token.go new file mode 100644 index 00000000..307921d7 --- /dev/null +++ b/experimental/ast/token.go @@ -0,0 +1,652 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + "strconv" + "strings" + "unicode" +) + +// Constants for extracting the parts of tokenImpl.kindAndOffset +const ( + tokenKindMask = 0b111 + tokenOffsetShift = 3 +) + +const ( + TokenUnrecognized TokenKind = iota // Unrecognized garbage in the input file. + + TokenSpace // Non-comment contiguous whitespace. + TokenComment // A single comment. + TokenIdent // An identifier. + TokenString // A string token. May be a non-leaf for non-contiguous quoted strings. + TokenNumber // A run of digits that is some kind of number. + TokenPunct // Some punctuation. May be a non-leaf for delimiters like {}. + _TokenUnused // Reserved for future use. + + // DO NOT ADD MORE TOKEN KINDS: ONLY THREE BITS ARE AVAILABLE + // TO STORE THEM. +) + +// TokenKind identifies what kind of token a particular [Token] is. +type TokenKind byte + +// IsSkippable returns whether this is a token that should be examined during +// syntactic analysis. +func (t TokenKind) IsSkippable() bool { + return t == TokenSpace || t == TokenComment || t == TokenUnrecognized +} + +// String implements [strings.Stringer] for TokenKind. +func (t TokenKind) String() string { + switch t { + case TokenUnrecognized: + return "TokenUnrecognized" + case TokenSpace: + return "TokenSpace" + case TokenComment: + return "TokenComment" + case TokenIdent: + return "TokenIdent" + case TokenString: + return "TokenString" + case TokenNumber: + return "TokenNumber" + case TokenPunct: + return "TokenPunct" + default: + return fmt.Sprintf("TokenKind(%d)", int(t)) + } +} + +// Token is a lexical element of a Protobuf file. +// +// Protocompile's token stream is actually a tree of tokens. Some tokens, called +// non-leaf tokens, contain a selection of tokens "within" them. For example, the +// two matched braces of a message body are a single token, and all of the tokens +// between the braces are contained inside it. This moves certain complexity into +// the lexer in a way that allows us to handle matching delimiters generically. +// +// The zero value of Token is the so-called "nil token", which is used to denote the +// absence of a token. +type Token struct { + withContext + + raw rawToken +} + +// IsPaired returns whether this is a non-nil leaf token. +func (t Token) IsLeaf() bool { + if t.Nil() { + return false + } + + if impl := t.impl(); impl != nil { + return impl.IsLeaf() + } + return t.synthetic().IsLeaf() +} + +// IsSynthetic returns whether this is a non-nil synthetic token (i.e., a token that didn't +// come from a parsing operation.) +func (t Token) IsSynthetic() bool { + return t.raw < 0 +} + +// Kind returns what kind of token this is. +// +// Returns [TokenUnrecognized] if this token is nil. +func (t Token) Kind() TokenKind { + if t.Nil() { + return TokenUnrecognized + } + + if impl := t.impl(); impl != nil { + return impl.Kind() + } + return t.synthetic().kind +} + +// Text returns the text fragment referred to by this token. +// Note that this DOES NOT include any child tokens! +// +// Returns empty string fot the nil token. +func (t Token) Text() string { + if t.Nil() { + return "" + } + + if synth := t.synthetic(); synth != nil { + if synth.kind == TokenString { + // If this is a string, we need to add quotes and escape it. + // This can be done on-demand. 
+ + var escaped strings.Builder + escaped.WriteRune('"') + for _, r := range synth.text { + switch { + case r == '\n': + escaped.WriteString("\\n") + case r == '\r': + escaped.WriteString("\\r") + case r == '\t': + escaped.WriteString("\\t") + case r == '\x00': + escaped.WriteString("\\0") + case r == '"': + escaped.WriteString("\\\"") + case r == '\\': + escaped.WriteString("\\\\") + case r < ' ': + fmt.Fprintf(&escaped, "\\x%02x", r) + case unicode.IsGraphic(r): + escaped.WriteRune(r) + case r < 0x10000: + fmt.Fprintf(&escaped, "\\u%04x", r) + default: + fmt.Fprintf(&escaped, "\\U%08x", r) + } + } + escaped.WriteRune('"') + return escaped.String() + } + + return synth.text + } + + start, end := t.offsets() + return t.Context().Text()[start:end] +} + +// Span implements [Spanner] for Token. +func (t Token) Span() Span { + if t.Nil() || t.IsSynthetic() { + return Span{} + } + + if !t.IsLeaf() { + start, end := t.StartEnd() + a, _ := start.offsets() + _, b := end.offsets() + + return t.Context().NewSpan(a, b) + } + + return t.Context().NewSpan(t.offsets()) +} + +// StartEnd returns the open and close tokens for this token. +// +// If this is a leaf token, start and end will be the same token and will compare as equal. +// +// Panics if this is a nil token. +func (t Token) StartEnd() (start, end Token) { + t.panicIfNil() + + switch impl := t.impl(); { + case impl == nil: + switch synth := t.synthetic(); { + case synth.IsLeaf(): + return t, t + case synth.IsOpen(): + start = t + end = synth.otherEnd.With(t) + case synth.IsClose(): + start = synth.otherEnd.With(t) + end = t + } + + case impl.IsLeaf(): + return t, t + case impl.IsOpen(): + start = t + end = (t.raw + rawToken(impl.Offset())).With(t) + case impl.IsClose(): + start = (t.raw + rawToken(impl.Offset())).With(t) + end = t + } + + return +} + +// Offsets returns the byte offsets of this token within the file it came from. +// +// The return value for synthetic tokens is unspecified. +// +// Note that this DOES NOT include any child tokens! +func (t Token) offsets() (start, end int) { + if t.IsSynthetic() { + return + } + + end = int(t.impl().end) + // If this is the first token, the start is implicitly zero. + if t.raw == 1 { + return 0, end + } + + prev := (t.raw - 1).With(t) + return int(prev.impl().end), end +} + +// Children returns a Cursor over the children of this token. +// +// If the token is nil or is a leaf token, returns nil. +func (t Token) Children() *Cursor { + if t.Nil() || t.IsLeaf() { + return nil + } + + if impl := t.impl(); impl != nil { + start, end := t.StartEnd() + return &Cursor{ + withContext: t.withContext, + start: start.raw + 1, // Skip the start! + end: end.raw, + } + } else { + synth := t.synthetic() + if synth.IsClose() { + return synth.otherEnd.With(t).Children() + } + + return &Cursor{ + withContext: t.withContext, + stream: synth.children, + } + } +} + +// Name converts this token into its corresponding identifier name, potentially +// performing normalization. +// +// Currently, we perform no normalization, so this is the same value as Text(), but +// that may change in the future. +// +// Returns "" for non-identifiers. +func (t Token) Name() string { + if t.Kind() != TokenIdent { + return "" + } + return t.Text() +} + +// AsUInt converts this token into an unsigned integer if it is a numeric token. +// bits is the maximum number of bits that are used to represent this value. +// +// Otherwise, or if the result would overflow, returns 0, false. 
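// For example (a sketch; tok is assumed to be a Token already in hand):
//
//	if tok.Kind() == TokenNumber {
//		if v, ok := tok.AsInt(); ok {
//			fmt.Println("integer literal:", v) // v holds the parsed uint64 value
//		}
//	}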
+func (t Token) AsInt() (uint64, bool) { + if t.Kind() != TokenNumber { + return 0, false + } + + // Check if this number has already been parsed for us. + any, present := t.Context().literals[t.raw] + if v, ok := any.(uint64); present && ok { + return v, true + } + + // Otherwise, it's an base 10 integer. + v, err := strconv.ParseUint(t.Text(), 10, 64) + return v, err == nil +} + +// AsFloat converts this token into float if it is a numeric token. If the value is +// not precisely representable as a float64, it is clamped to an infinity or +// rounded (ties-to-even). +// +// This function does not handle the special non-finite values inf and nan. +// +// Otherwise, returns 0.0, false. +func (t Token) AsFloat() (float64, bool) { + if t.Kind() != TokenNumber { + return 0, false + } + + // Check if this number has already been parsed for us. + any, present := t.Context().literals[t.raw] + if v, ok := any.(float64); present && ok { + return v, true + } + if v, ok := any.(uint64); present && ok { + return float64(v), true + } + + // Otherwise, it's an base 10 integer. + v, err := strconv.ParseUint(t.Text(), 10, 64) + return float64(v), err == nil +} + +// AsString converts this token into a Go string if it is in fact a string literal token. +// +// Otherwise, returns "", false. +func (t Token) AsString() (string, bool) { + if t.Kind() != TokenString { + return "", false + } + + // Synthetic strings don't have quotes around them and don't + // contain escapes. + if synth := t.synthetic(); synth != nil { + return synth.text, true + } + + // Check if there's an unescaped version of this string. + any, present := t.Context().literals[t.raw] + if unescaped, ok := any.(string); present && ok { + return unescaped, true + } + + // If it's not in the map, that means this is a single + // leaf string whose quotes we can just pull of off the + // token, after removing the quotes. + text := t.Text() + if len(text) < 2 { + // Some kind of invalid, unterminated string token. + return "", true + } + return text[1 : len(text)-1], true +} + +// IsPureString returns whether this token was parsed from a string literal +// that did not need post-processing after being parsed. +// +// Returns false for synthetic tokens. +func (t Token) IsPureString() bool { + if t.IsSynthetic() || t.Kind() != TokenString { + return false + } + _, present := t.Context().literals[t.raw] + return !present +} + +// String implements [strings.Stringer] for Token. +func (t Token) String() string { + return t.raw.String() +} + +func (t Token) impl() *tokenImpl { + t.panicIfNil() + + if t.IsSynthetic() { + return nil + } + // Need to subtract off one, because the zeroth + // rawToken is used as a "missing" sentinel. + return &t.ctx.stream[t.raw-1] +} + +func (t Token) synthetic() *tokenSynthetic { + t.panicIfNil() + + if !t.IsSynthetic() { + return nil + } + return &t.ctx.syntheticTokens[^t.raw] +} + +// Cursor is an iterator-like construct for looping over a token tree. +// Unlike a plain range func, it supports peeking. +type Cursor struct { + withContext + + // This is used if this is a cursor over non-synthetic tokens. + // start is inclusive, end is exclusive. start == end means the stream + // is empty. + start, end rawToken + // This is used if this is a cursor over the children of a synthetic token. + // If stream is nil, we know we're in the non-synthetic case. + stream []rawToken + idx int +} + +// CursorMark is the return value of [Cursor.Mark], which marks a position on +// a Cursor for rewinding to. 
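// A typical backtracking pattern looks roughly like the following sketch
// (cursor is assumed to be a *Cursor in scope; tryParseThing is a
// hypothetical helper that consumes tokens on success):
//
//	mark := cursor.Mark()
//	if !tryParseThing(cursor) { // hypothetical speculative parse
//		cursor.Rewind(mark) // undo whatever the attempt consumed
//	}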
+type CursorMark struct { + // This contains exactly the values needed to rewind the cursor. + owner *Cursor + start rawToken + idx int +} + +// Done returns whether or not there are still tokens left to yield. +func (c *Cursor) Done() bool { + return c.Peek().Nil() +} + +// Mark makes a mark on this cursor to indicate a place that can be rewound +// to. +func (c *Cursor) Mark() CursorMark { + return CursorMark{ + owner: c, + start: c.start, + idx: c.idx, + } +} + +// Rewind moves this cursor back to the position described by Rewind. +// +// Panics if mark was not created using this cursor's Mark method. +func (c *Cursor) Rewind(mark CursorMark) { + if c != mark.owner { + panic("protocompile/ast: rewound cursor using the wrong cursor's mark") + } + c.start = mark.start + c.idx = mark.idx +} + +// ensureProgress panics if this cursor has not advanced past the current mark. +func (c *Cursor) ensureProgress(mark CursorMark) { + if mark.owner == nil { + return + } + if mark.start == c.start && mark.idx == c.idx { + panic(fmt.Sprintf("protocompile/ast: failed to make progress at offset %d; this is a bug in protocompile", c.start)) + } +} + +// Peek returns the next token in the sequence, if there is one. +// This may return a skippable token. +// +// Returns the nil token if this cursor is at the end of the stream. +func (c *Cursor) PeekSkippable() Token { + if c == nil { + return Token{} + } + + if c.IsSynthetic() { + if c.idx == len(c.stream) { + return Token{} + } + return c.stream[c.idx].With(c) + } + if c.start >= c.end { + return Token{} + } + return c.start.With(c) +} + +// Pop returns the next skippable token in the sequence, and advances the cursor. +func (c *Cursor) PopSkippable() Token { + tok := c.PeekSkippable() + if tok.Nil() { + return tok + } + + if c.IsSynthetic() { + c.idx++ + } else { + impl := c.start.With(c).impl() + if impl.Offset() > 0 { + c.start += rawToken(impl.Offset()) + } + c.start++ + } + return tok +} + +// Peek returns the next token in the sequence, if there is one. +// This automatically skips past skippable tokens. +// +// Returns the nil token if this cursor is at the end of the stream. +func (c *Cursor) Peek() Token { + for { + next := c.PeekSkippable() + if next.Nil() || !next.Kind().IsSkippable() { + return next + } + c.PopSkippable() + } +} + +// Pop returns the next token in the sequence, and advances the cursor. +func (c *Cursor) Pop() Token { + tok := c.Peek() + if tok.Nil() { + return tok + } + + return c.PopSkippable() +} + +// Iter is an iterator over the remaining tokens in the cursor. +// +// Note that breaking out of a loop over this iterator, and starting +// a new loop, will resume at the iteration that was broken at. E.g., if +// we break out of a loop over c.Iter at token tok, and start a new range +// over c.Iter, the first yielded token will be tok. +func (c *Cursor) Iter(yield func(Token) bool) { + for { + tok := c.Peek() + if tok.Nil() || !yield(tok) { + break + } + _ = c.Pop() + } +} + +// IterSkippable is like [Cursor.Iter]. but it yields skippable tokens, too. +// +// Note that breaking out of a loop over this iterator, and starting +// a new loop, will resume at the iteration that was broken at. E.g., if +// we break out of a loop over c.Iter at token tok, and start a new range +// over c.Iter, the first yielded token will be tok. 
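// For example (a sketch; cursor is assumed to be a *Cursor in scope), the
// names of the remaining identifier tokens, skippable tokens included, can
// be collected like so:
//
//	var idents []string
//	cursor.IterSkippable(func(tok Token) bool {
//		if tok.Kind() == TokenIdent {
//			idents = append(idents, tok.Name())
//		}
//		return true // keep iterating
//	})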
+func (c *Cursor) IterSkippable(yield func(Token) bool) { + for { + tok := c.PeekSkippable() + if tok.Nil() || !yield(tok) { + break + } + _ = c.PopSkippable() + } +} + +// IsSynthetic returns whether this is a cursor over synthetic tokens. +func (c *Cursor) IsSynthetic() bool { + return c.stream != nil +} + +// ** PRIVATE ** // + +// rawToken is the ID of a token separated from its context. +// +// Let n := int(id). If n is zero, it is the nil token. If n is positive, it is +// a non-synthetic token, whose index is n - 1. If it is negative, it is a +// synthetic token, whose index is ^n. +type rawToken int32 + +// Wrap wraps this rawToken with a context to present to the user. +func (t rawToken) With(c Contextual) Token { + if t == 0 { + return Token{} + } + return Token{withContext{c.Context()}, t} +} + +func (t rawToken) String() string { + if t == 0 { + return "Token()" + } + if t < 0 { + return fmt.Sprintf("Token(synth#%d)", ^int(t)) + } + + return fmt.Sprintf("Token(%d)", int(t)-1) +} + +// tokenImpl is the data of a token stored in a [Context]. +type tokenImpl struct { + // We store the end of the token, and the start is implicitly + // given by the end of the previous token. We use the end, rather + // than the start, it makes adding tokens one by one to the stream + // easier, because once the token is pushed, its start and end are + // set correctly, and don't depend on the next token being pushed. + end uint32 + kindAndOffset int32 +} + +// Kind extracts the token's kind, which is stored +func (t tokenImpl) Kind() TokenKind { + return TokenKind(t.kindAndOffset & tokenKindMask) +} + +// Offset returns the offset from this token to its matching open/close, if any. +func (t tokenImpl) Offset() int { + return int(t.kindAndOffset >> tokenOffsetShift) +} + +// IsLeaf checks whether this is a leaf token. +func (t tokenImpl) IsLeaf() bool { + return t.Offset() == 0 +} + +// IsLeaf checks whether this is a open token with a matching closer. +func (t tokenImpl) IsOpen() bool { + return t.Offset() > 0 +} + +// IsLeaf checks whether this is a closer token with a matching opener. +func (t tokenImpl) IsClose() bool { + return t.Offset() < 0 +} + +// tokenSynthetic is the data of a synthetic token stored in a [Context]. +type tokenSynthetic struct { + text string + kind TokenKind + + // Non-zero if this token has a matching other end. Whether this is + // the opener or the closer is determined by whether children is + // nil: it is nil for the closer. + otherEnd rawToken + children []rawToken +} + +// IsLeaf checks whether this is a leaf token. +func (t tokenSynthetic) IsLeaf() bool { + return t.otherEnd == 0 +} + +// IsLeaf checks whether this is a open token with a matching closer. +func (t tokenSynthetic) IsOpen() bool { + return !t.IsLeaf() && t.children != nil +} + +// IsLeaf checks whether this is a closer token with a matching opener. +func (t tokenSynthetic) IsClose() bool { + return !t.IsLeaf() && t.children == nil +} diff --git a/experimental/ast/token_test.go b/experimental/ast/token_test.go new file mode 100644 index 00000000..12496406 --- /dev/null +++ b/experimental/ast/token_test.go @@ -0,0 +1,162 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "testing" + + "github.com/bufbuild/protocompile/experimental/report" + "github.com/stretchr/testify/assert" +) + +func TestNilToken(t *testing.T) { + assert := assert.New(t) + + var n Token + assert.True(n.Nil()) + assert.False(n.IsLeaf()) + assert.False(n.IsSynthetic()) + assert.Equal(n.Kind(), TokenUnrecognized) +} + +func TestLeafTokens(t *testing.T) { + assert := assert.New(t) + + ctx := newContext(report.File{Path: "test", Text: "abc def ghi"}) + + abc := ctx.PushToken(3, TokenIdent) + ctx.PushToken(1, TokenSpace) + def := ctx.PushToken(3, TokenIdent) + ctx.PushToken(1, TokenSpace) + ghi := ctx.PushToken(3, TokenIdent) + + assertIdent := func(tok Token, a, b int, text string) { + start, end := tok.Span().Offsets() + assert.Equal(a, start) + assert.Equal(b, end) + + assert.False(tok.Nil()) + assert.False(tok.IsSynthetic()) + assert.True(tok.IsLeaf()) + assert.Equal(text, tok.Text()) + assert.Equal(TokenIdent, abc.Kind()) + tokensEq(t, collect(tok.Children().Iter)) + } + + assertIdent(abc, 0, 3, "abc") + assertIdent(def, 4, 7, "def") + assertIdent(ghi, 8, 11, "ghi") + + jkl := ctx.NewIdent("jkl") + assert.False(jkl.Nil()) + assert.True(jkl.IsLeaf()) + assert.True(jkl.IsSynthetic()) + assert.Equal("jkl", jkl.Text()) + tokensEq(t, collect(jkl.Children().Iter)) +} + +func TestTreeTokens(t *testing.T) { + assert := assert.New(t) + + ctx := newContext(report.File{Path: "test", Text: "abc(def(x), ghi)"}) + + _ = ctx.PushToken(3, TokenIdent) + open := ctx.PushToken(1, TokenPunct) + def := ctx.PushToken(3, TokenIdent) + open2 := ctx.PushToken(1, TokenPunct) + x := ctx.PushToken(1, TokenIdent) + close2 := ctx.PushToken(1, TokenPunct) + ctx.FuseTokens(open2, close2) + comma := ctx.PushToken(1, TokenPunct) + space := ctx.PushToken(1, TokenSpace) + ghi := ctx.PushToken(3, TokenIdent) + close := ctx.PushToken(1, TokenPunct) + ctx.FuseTokens(open, close) + + _ = space + + assert.True(!open.IsLeaf()) + assert.True(!open2.IsLeaf()) + assert.True(!close.IsLeaf()) + assert.True(!close2.IsLeaf()) + + assert.Equal(TokenPunct, open.Kind()) + assert.Equal(TokenPunct, close.Kind()) + assert.Equal(TokenPunct, open2.Kind()) + assert.Equal(TokenPunct, close2.Kind()) + + start, end := open2.StartEnd() + tokenEq(t, start, open2) + tokenEq(t, end, close2) + start, end = close2.StartEnd() + tokenEq(t, start, open2) + tokenEq(t, end, close2) + + start, end = open.StartEnd() + tokenEq(t, start, open) + tokenEq(t, end, close) + start, end = close.StartEnd() + tokenEq(t, start, open) + tokenEq(t, end, close) + + tokensEq(t, collect(open2.Children().Iter), x) + tokensEq(t, collect(close2.Children().Iter), x) + + tokensEq(t, collect(open.Children().Iter), def, open2, comma, ghi) + tokensEq(t, collect(close.Children().Iter), def, open2, comma, ghi) + + open3 := ctx.NewPunct("(") + close3 := ctx.NewPunct(")") + ctx.NewOpenClose(open3, close3, def, open2) + + assert.True(!open3.IsLeaf()) + assert.True(!close3.IsLeaf()) + start, end = open3.StartEnd() + tokenEq(t, start, open3) + tokenEq(t, end, close3) + start, end = close3.StartEnd() + tokenEq(t, start, open3) + tokenEq(t, end, 
close3) + + tokensEq(t, collect(open3.Children().Iter), def, open2) + tokensEq(t, collect(close3.Children().Iter), def, open2) +} + +// tokenEq is the singular version of tokensEq. +func tokenEq(t *testing.T, a, b Token) { + tokensEq(t, []Token{a}, b) +} + +// tokensEq is a helper for comparing tokens that results in more readable printouts. +func tokensEq(t *testing.T, tokens []Token, expected ...Token) { + a := make([]string, len(tokens)) + for i, t := range tokens { + a[i] = t.String() + } + b := make([]string, len(expected)) + for i, t := range expected { + b[i] = t.String() + } + assert.Equal(t, b, a) +} + +// collect is a polyfill for [slices.Collect]. +func collect[T any](iter func(func(T) bool)) (s []T) { + iter(func(t T) bool { + s = append(s, t) + return true + }) + return +} diff --git a/experimental/ast/type.go b/experimental/ast/type.go new file mode 100644 index 00000000..c61a2d11 --- /dev/null +++ b/experimental/ast/type.go @@ -0,0 +1,373 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + "slices" + + "github.com/bufbuild/protocompile/internal/arena" +) + +const ( + typePath typeKind = iota + 1 + typeModified + typeGeneric +) + +const ( + TypePrefixUnknown TypePrefix = iota + TypePrefixOptional + TypePrefixRepeated + TypePrefixRequired + + // This is the "stream Foo.bar" syntax of RPC methods. It is also treated as + // a prefix. + TypePrefixStream +) + +// TypePrefix is a prefix for a type, such as required, optional, or repeated. +type TypePrefix int8 + +type typeKind int8 + +// TypePrefixByName looks up a prefix kind by name. +// +// If name is not a known prefix, returns [TypePrefixUnknown]. +func TypePrefixByName(name string) TypePrefix { + switch name { + case "optional": + return TypePrefixOptional + case "repeated": + return TypePrefixRepeated + case "required": + return TypePrefixRequired + case "stream": + return TypePrefixStream + default: + return TypePrefixUnknown + } +} + +// String implements [strings.Stringer] for Modifier. +func (m TypePrefix) String() string { + switch m { + case TypePrefixUnknown: + return "unknown" + case TypePrefixOptional: + return "optional" + case TypePrefixRepeated: + return "repeated" + case TypePrefixRequired: + return "required" + case TypePrefixStream: + return "stream" + default: + return fmt.Sprintf("modifier%d", int(m)) + } +} + +// Type is the type of a field or service method. +// +// In the Protocompile AST, we regard many things as types for the sake of diagnostics. +// For example, "optional string" is a type, but so is the invalid type +// "optional repeated string". +// +// This is implemented by types in this package of the form Type*. +type Type interface { + Spanner + + typeKind() typeKind + typeIndex() arena.Untyped +} + +// types is storage for every kind of Type in a Context. 
+type types struct { + modifieds arena.Arena[rawPrefixed] + generics arena.Arena[rawGeneric] +} + +func (TypePath) typeKind() typeKind { return typePath } +func (TypePrefixed) typeKind() typeKind { return typeModified } +func (TypeGeneric) typeKind() typeKind { return typeGeneric } + +func (TypePath) typeIndex() arena.Untyped { return 0 } +func (t TypePrefixed) typeIndex() arena.Untyped { return t.ptr } +func (t TypeGeneric) typeIndex() arena.Untyped { return t.ptr } + +// TypePath is a type that is a simple path reference. +type TypePath struct { + // The path that refers to this type. + Path +} + +var _ Type = TypePath{} + +// TypePrefixed is a type with a [TypePrefix]. +// +// Unlike in ordinary Protobuf, the Protocompile AST permits arbitrary nesting +// of modifiers. +type TypePrefixed struct { + withContext + + ptr arena.Untyped + raw *rawPrefixed +} + +type rawPrefixed struct { + prefix rawToken + ty rawType +} + +// TypePrefixedArgs is the arguments for [Context.NewTypePrefixed]. +type TypePrefixedArgs struct { + Prefix Token + Type Type +} + +var _ Type = TypePrefixed{} + +// Prefix extracts the modifier out of this type. +// +// Returns [TypePrefixUnknown] if [TypePrefixed.PrefixToken] does not contain +// a known modifier. +func (t TypePrefixed) Prefix() TypePrefix { + return TypePrefixByName(t.PrefixToken().Text()) +} + +// PrefixToken returns the token representing this type's prefix. +func (t TypePrefixed) PrefixToken() Token { + return t.raw.prefix.With(t) +} + +// Type returns the type that is being prefixed. +func (t TypePrefixed) Type() Type { + return t.raw.ty.With(t) +} + +// SetType sets the expression that is being prefixed. +// +// If passed nil, this clears the type. +func (t TypePrefixed) SetType(ty Type) { + t.raw.ty = toRawType(ty) +} + +// Span implements [Spanner] for TypePrefixed. +func (t TypePrefixed) Span() Span { + return JoinSpans(t.PrefixToken(), t.Type()) +} + +// TypeGeneric is a type with generic arguments. +// +// Protobuf does not have generics... mostly. It has the map production, +// which looks like something that generalizes, but doesn't. It is useful to parse +// when users mistakenly think this generalizes or provide the incorrect number +// of arguments. +// +// You will usually want to immediately call [TypeGeneric.Map] to codify the assumption +// that all generic types understood by your code are maps. +// +// TypeGeneric implements [Commas[Type]] for accessing its arguments. +type TypeGeneric struct { + withContext + + ptr arena.Untyped + raw *rawGeneric +} + +type rawGeneric struct { + path rawPath + args rawTypeList +} + +// TypeGenericArgs is the arguments for [Context.NewTypeGeneric]. +// +// Generic arguments should be added after construction with [TypeGeneric.AppendComma]. +type TypeGenericArgs struct { + Path Path + AngleBrackets Token +} + +var _ Type = TypeGeneric{} + +// Path returns the path of the "type constructor". For example, for +// my.Map, this would return the path my.Map. +func (t TypeGeneric) Path() Path { + return t.raw.path.With(t) +} + +// AsMap extracts the key/value types out of this generic type, checking that it's actually a +// map. This is intended for asserting the extremely common case of "the only generic +// type is map". +// +// Returns nils if this is not a map, or it has the wrong number of generic arguments. 
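// For example (a sketch; ty is assumed to be a TypeGeneric already in hand):
//
//	if key, value := ty.AsMap(); key != nil {
//		// ty is a well-formed map<K, V>; key and value are its type arguments.
//		_ = value
//	}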
+func (t TypeGeneric) AsMap() (key, value Type) { + if t.Path().AsBuiltin() != BuiltinMap || t.Args().Len() != 2 { + return nil, nil + } + + return t.Args().At(0), t.Args().At(1) +} + +// Args returns the argument list for this generic type. +func (t TypeGeneric) Args() TypeList { + return TypeList{ + t.withContext, + &t.raw.args, + } +} + +// Span implements [Spanner] for TypeGeneric. +func (t TypeGeneric) Span() Span { + return JoinSpans(t.Path(), t.Args()) +} + +// TypeList is a [Commas] over a list of types surrounded by some kind of brackets. +// +// Despite the name, TypeList does not implement [Type] because it is not a type. +type TypeList struct { + withContext + + raw *rawTypeList +} + +var ( + _ Commas[Type] = TypeList{} + _ Spanner = TypeList{} +) + +type rawTypeList struct { + brackets rawToken + args []struct { + ty rawType + comma rawToken + } +} + +// Brackets returns the token tree for the brackets wrapping the argument list. +// +// May be nil, if the user forgot to include brackets. +func (d TypeList) Brackets() Token { + return d.raw.brackets.With(d) +} + +// Len implements [Slice] for MethodTypes. +func (d TypeList) Len() int { + return len(d.raw.args) +} + +// At implements [Slice] for MethodTypes. +func (d TypeList) At(n int) Type { + return d.raw.args[n].ty.With(d) +} + +// At implements [Iter] for MethodTypes. +func (d TypeList) Iter(yield func(int, Type) bool) { + for i, arg := range d.raw.args { + if !yield(i, arg.ty.With(d)) { + break + } + } +} + +// Append implements [Inserter] for TypeGeneric. +func (d TypeList) Append(ty Type) { + d.InsertComma(d.Len(), ty, Token{}) +} + +// Insert implements [Inserter] for TypeGeneric. +func (d TypeList) Insert(n int, ty Type) { + d.InsertComma(n, ty, Token{}) +} + +// Delete implements [Inserter] for TypeGeneric. +func (d TypeList) Delete(n int) { + d.raw.args = slices.Delete(d.raw.args, n, n+1) +} + +// Comma implements [Commas] for MethodTypes. +func (d TypeList) Comma(n int) Token { + return d.raw.args[n].comma.With(d) +} + +// AppendComma implements [Commas] for MethodTypes. +func (d TypeList) AppendComma(ty Type, comma Token) { + d.InsertComma(d.Len(), ty, comma) +} + +// InsertComma implements [Commas] for MethodTypes. +func (d TypeList) InsertComma(n int, ty Type, comma Token) { + d.Context().panicIfNotOurs(ty, comma) + + d.raw.args = slices.Insert(d.raw.args, n, struct { + ty rawType + comma rawToken + }{toRawType(ty), comma.raw}) +} + +// Span implements [Spanner] for MethodTypes. +func (d TypeList) Span() Span { + if !d.Brackets().Nil() { + return d.Brackets().Span() + } + + var span Span + for _, arg := range d.raw.args { + span = JoinSpans(span, arg.ty.With(d), arg.comma.With(d)) + } + return span +} + +// rawType is the raw representation of a type. +// +// The vast, vast majority of types are paths. To avoid needing to waste +// space for such types, we use the following encoding for rawType. +// +// First, note that if the first half of a rawPath is negative, the other +// must be zero. Thus, if the first "token" of the rawPath is negative and +// the second is not, the first is ^typeKind and the second is an index +// into a table in a Context. Otherwise, it's a path type. This logic is +// implemented in With(). 
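// For example (illustrative only): a TypeGeneric stored at arena index 7 is
// encoded as rawType{^rawToken(typeGeneric), rawToken(7)}, while a plain
// TypePath reuses its rawPath representation unchanged.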
+type rawType rawPath
+
+func toRawType(t Type) rawType {
+	if t == nil {
+		return rawType{}
+	}
+	if path, ok := t.(TypePath); ok {
+		return rawType(path.Path.raw)
+	}
+	return rawType{^rawToken(t.typeKind()), rawToken(t.typeIndex())}
+}
+
+func (t rawType) With(c Contextual) Type {
+	if t[0] == 0 && t[1] == 0 {
+		return nil
+	}
+
+	if t[0] < 0 && t[1] != 0 {
+		c := c.Context()
+		ptr := arena.Untyped(t[1])
+		switch typeKind(^t[0]) {
+		case typeModified:
+			return TypePrefixed{withContext{c}, ptr, c.types.modifieds.At(ptr)}
+		case typeGeneric:
+			return TypeGeneric{withContext{c}, ptr, c.types.generics.At(ptr)}
+		default:
+			panic(fmt.Sprintf("protocompile/ast: invalid typeKind: %d", ^t[0]))
+		}
+	}
+	return TypePath{rawPath(t).With(c)}
+}
diff --git a/experimental/report/doc.go b/experimental/report/doc.go
new file mode 100644
index 00000000..5b53be7c
--- /dev/null
+++ b/experimental/report/doc.go
@@ -0,0 +1,110 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package report provides a robust diagnostics framework. It offers diagnostic
+construction, interchange, and ASCII art rendering functionality.
+
+Diagnostics are collected into a [Report], which is a helpful builder over
+a slice of [Diagnostic]s. Each [Diagnostic] consists of a Go error plus
+metadata for rendering, such as source code spans, notes, and suggestions.
+This package takes after Rust's diagnostic philosophy: diagnostics should
+be pleasant to read, provide rich information about the error, and come in
+a standard, machine-readable format.
+
+Reports can be rendered using a [Renderer], which provides several options
+for how to render the result to the user.
+
+A Report can be converted into a Protobuf using [Report.ToProto]. This can
+be serialized to e.g. JSON as an alternative error output.
+
+The [IndexedFile] type is a generic utility for converting file offsets into
+text editor coordinates. E.g., given a byte offset, what is the user-visible
+line and column number? Package report expects the caller to construct this
+information themselves, to avoid recomputing it unnecessarily.
+
+# Defining Diagnostics
+
+Generally, to define a diagnostic, you should define a new Go error type,
+and then make it implement [Diagnose]. This has two benefits:
+
+ 1. When someone using your tool as a library looks through a Report, they
+    can type assert Diagnostic.Err to programmatically determine the nature
+    of a diagnostic.
+
+ 2. When emitting the diagnostic in different places, you get the same UX.
+    This means you should do this even if the error type will be unexported.
+
+Sometimes (2) is not enough of a benefit, in which case you can just use
+Report.Errorf() and friends.
+
+# Diagnostics Style Guide
+
+Diagnostics created with package report expect to be written in a certain
+way. The following guidelines are taken, mostly verbatim, from the Rust
+Project's diagnostics style guide.
+https://github.com/rust-lang/rustc-dev-guide/blob/master/src/diagnostics.md
+
+The golden rule: Users will see diagnostics when they are frustrated. Do not
+make them more frustrated. Do not make them feel like your tool does not
+respect their intelligence.
+
+ 1. Errors are for semantic constraint violations, i.e., the compiler will
+    not produce valid output. Warnings are for when the compiler notices
+    something not strictly forbidden but probably bad. Remarks are
+    essentially warnings that are not shown to the user by default.
+    Diagnostic notes are for factual information that adds context to why the
+    diagnostic was shown. Diagnostic help is for prose suggestions to the
+    user. Diagnostic debugs are never shown to normal users, and are for
+    compiler debugging only.
+
+ 2. Diagnostics should be written in plain, friendly English. Your message
+    will appear on many surfaces, such as terminals and LSP plugin insets.
+    The golden standard is that the error message should be readable and
+    understandable by an inexperienced, hung-over programmer whose native
+    language is not English, displayed on a dirty budget smartphone screen.
+
+ 3. Diagnostic messages do not begin with a capital letter and do not end in
+    punctuation. The compiler does not ask questions. The words "error",
+    "warning", "remark", "help", and "note" are NEVER capitalized. Never
+    refer to "a diagnostic"; prefer something more specific, like "compiler
+    error".
+
+ 4. Error messages should be succinct: short and sweet, keeping in mind (1).
+    Users will see these messages many, many times.
+
+ 5. The word "illegal" is illegal. We use this term inside the compiler, but
+    the word may have negative connotations for some people. "Forbidden" is
+    also forbidden. Prefer "invalid", "not allowed", etc.
+
+ 6. The first span in a diagnostic (the primary span) should be precisely
+    the code that resulted in the error. Try to avoid more than three spans
+    in an error. Try to pick the smallest spans you can: instead of
+    highlighting a whole type definition, try highlighting just its name.
+
+ 7. Try not to emit multiple diagnostics for the same error. This requires
+    more work in the compiler, but it is worth it for the UX.
+
+ 8. If your tool does not have enough information to emit a good diagnostic,
+    that is a bug in either your tool, or in the language your tool operates
+    on (in both cases, it is the tool's job to acquire this information).
+
+ 9. When talking about your tool, call it "the compiler", "the linter", etc.
+    Your tool is a machine, not a person; therefore it does not speak in
+    first person. When referring to a programming language's semantics,
+    rather than the compiler's, use that language's name. For example,
+    "Go does not support...", "... is not valid Protobuf", "this is a
+    limitation of C++".
+*/
+package report
diff --git a/experimental/report/renderer.go b/experimental/report/renderer.go
new file mode 100644
index 00000000..6d7ef568
--- /dev/null
+++ b/experimental/report/renderer.go
@@ -0,0 +1,900 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +package report + +import ( + "bytes" + "fmt" + "math/bits" + "slices" + "strconv" + "strings" + "unicode" + + "github.com/rivo/uniseg" +) + +// Renderer configures a diagnostic rendering operation. +type Renderer struct { + // If set, uses a compact one-line format for each diagnostic. + Austere bool + + // If set, rendering results are enriched with ANSI color escapes. + Colorize bool + + // Upgrades all warnings to errors. + WarningsAreErrors bool + + // If set, remark diagnostics will be printed. + // + // Ignored by [Renderer.RenderDiagnostic]. + ShowRemarks bool + + // If set, rendering a diagnostic will show the debug footer. + ShowDebug bool +} + +// Render renders a diagnostic report. +// +// In addition to returning the rendering result, returns whether any +// errors occurred. +func (r Renderer) Render(report *Report) (text string, haveErrors bool) { + var out strings.Builder + var errors, warnings int + for _, diagnostic := range report.Diagnostics { + if !r.ShowRemarks && diagnostic.Level == Remark { + continue + } + + out.WriteString(r.Diagnostic(diagnostic)) + out.WriteString("\n") + if !r.Austere { + out.WriteString("\n") + } + if diagnostic.Level == Error { + errors++ + } + if diagnostic.Level == Warning { + if r.WarningsAreErrors { + errors++ + } else { + warnings++ + } + } + } + if r.Austere { + return out.String(), errors > 0 + } + + c := r.colors() + + pluralize := func(count int, what string) string { + if count == 1 { + return "1 " + what + } + return fmt.Sprint(count, " ", what, "s") + } + + if errors > 0 { + fmt.Fprint(&out, c.bRed, "encountered ", pluralize(errors, "error")) + if warnings > 0 { + fmt.Fprint(&out, " and ", pluralize(warnings, "warning")) + } + fmt.Fprintln(&out, c.reset) + } else if warnings > 0 { + fmt.Fprintln(&out, c.bYellow, "encountered ", pluralize(warnings, "warning")) + } + + out.WriteString(c.reset) + return out.String(), errors > 0 +} + +// Diagnostic renders a single diagnostic to a string. +func (r *Renderer) Diagnostic(d Diagnostic) string { + var level string + switch d.Level { + case Error: + level = "error" + case Warning: + if r.WarningsAreErrors { + level = "error" + } else { + level = "warning" + } + case Remark: + level = "remark" + } + + c := r.colors() + + // For the simple style, we imitate the Go compiler. + if r.Austere { + annotation := d.Primary() + + if annotation.Start.Line == 0 { + if annotation.File.Path == "" { + return fmt.Sprintf( + "%s%s: %s%s", + c.ColorForLevel(d.Level), + level, + d.Err.Error(), + c.reset, + ) + } + + return fmt.Sprintf( + "%s%s: %s: %s%s", + c.ColorForLevel(d.Level), + level, + annotation.File.Path, + d.Err.Error(), + c.reset, + ) + } + + return fmt.Sprintf( + "%s%s: %s:%d:%d: %s%s", + c.ColorForLevel(d.Level), + level, + annotation.File.Path, + annotation.Start.Line, + annotation.Start.Column, + d.Err.Error(), + c.reset, + ) + } + + // For the other styles, we imitate the Rust compiler. See + // https://github.com/rust-lang/rustc-dev-guide/blob/master/src/diagnostics.md + + var out strings.Builder + fmt.Fprint(&out, c.BoldForLevel(d.Level), level, ": ", d.Err.Error(), c.reset) + + // Figure out how wide the line bar needs to be. This is given by + // the width of the largest line value among the annotations. 
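// (For example, if the largest annotated line number is 1043, the bar needs
// len("1043") == 4 columns; the max with 2 below keeps a minimum width of two.)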
+ var greatestLine int + for _, snip := range d.Annotations { + greatestLine = max(greatestLine, snip.End.Line) + } + lineBarWidth := len(strconv.Itoa(greatestLine)) // Easier than messing with math.Log10() + lineBarWidth = max(2, lineBarWidth) + + // Render all the diagnostic windows. + parts := partition(d.Annotations, func(a, b *Annotation) bool { return a.File.Path != b.File.Path }) + parts(func(i int, annotations []Annotation) bool { + out.WriteByte('\n') + out.WriteString(c.nBlue) + padBy(&out, lineBarWidth) + + if i == 0 { + primary := d.Annotations[0] + fmt.Fprintf(&out, "--> %s:%d:%d", primary.File.Path, primary.Start.Line, primary.Start.Column) + } else { + primary := annotations[0] + fmt.Fprintf(&out, "::: %s:%d:%d", primary.File.Path, primary.Start.Line, primary.Start.Column) + } + + // Add a blank line after the file. This gives the diagnostic window some + // visual breathing room. + out.WriteByte('\n') + out.WriteString(c.nBlue) + padBy(&out, lineBarWidth) + out.WriteString(" | ") + + window := buildWindow(d.Level, annotations) + window.Render(lineBarWidth, &c, &out) + return true + }) + + // Render a remedial file name for spanless errors. + if len(d.Annotations) == 0 && d.InFile != "" { + out.WriteByte('\n') + out.WriteString(c.nBlue) + padBy(&out, lineBarWidth-1) + + fmt.Fprintf(&out, "--> %s", d.InFile) + } + + // Render the footers. For simplicity we collect them into an array first. + footers := make([][3]string, 0, len(d.Notes)+len(d.Help)+len(d.Debug)) + for _, note := range d.Notes { + footers = append(footers, [3]string{c.bCyan, "note", note}) + } + for _, help := range d.Help { + footers = append(footers, [3]string{c.bCyan, "help", help}) + } + for _, debug := range d.Debug { + footers = append(footers, [3]string{c.bRed, "debug", debug}) + } + for _, footer := range footers { + out.WriteByte('\n') + out.WriteString(c.nBlue) + padBy(&out, lineBarWidth) + out.WriteString(" = ") + fmt.Fprint(&out, footer[0], footer[1], ": ", c.reset) + for i, line := range strings.Split(footer[2], "\n") { + if i > 0 { + out.WriteByte('\n') + margin := lineBarWidth + 3 + len(footer[1]) + 2 + padBy(&out, margin) + } + out.WriteString(line) + } + } + + out.WriteString(c.reset) + return out.String() +} + +func (r *Renderer) colors() color { + if !r.Colorize { + return color{r: r} + } + + return color{ + r: r, + reset: "\033[0m", + nRed: "\033[0;31m", + nYellow: "\033[0;33m", + nCyan: "\033[0;36m", + nBlue: "\033[0;34m", + bRed: "\033[1;31m", + bYellow: "\033[1;33m", + bCyan: "\033[1;36m", + bBlue: "\033[1;34m", + } +} + +// color is the colors used for pretty-rendering diagnostics. +type color struct { + r *Renderer + + reset string + // Normal colors. + nRed, nYellow, nCyan, nBlue string + // Bold colors. + bRed, bYellow, bCyan, bBlue string +} + +func (c color) ColorForLevel(l Level) string { + switch l { + case Error: + return c.nRed + case Warning: + if c.r.WarningsAreErrors { + return c.nRed + } + return c.nYellow + case Remark: + return c.nCyan + case note: + return c.nBlue + default: + return "" + } +} + +func (c color) BoldForLevel(l Level) string { + switch l { + case Error: + return c.bRed + case Warning: + if c.r.WarningsAreErrors { + return c.nRed + } + return c.bYellow + case Remark: + return c.bCyan + case note: + return c.bBlue + default: + return "" + } +} + +// window is an intermediate structure for rendering an annotated code snippet +// consisting of multiple spans on the same file. 
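For orientation, the window being described here is the per-file block of a rendered diagnostic; its shape, as seen in the golden outputs later in this diff, is roughly:

	 --> foo.proto:6:12
	  |
	1 | syntax = "proto4"
	  |          -------- syntax version specified here
	...
	6 |   required size_t x = 0;
	  |            ^^^^^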
+type window struct { + file File + // The line number at which the text starts in the overall source file. + start int + // The range this window's text occupies in the containing source File. + offsets [2]int + // A list of all underline elements in this window. Must be sorted + // according to cmpUnderlines. + underlines []underline + multilines []multiline +} + +// buildWindow builds a diagnostic window for the given annotations, which must all have +// the same file. +func buildWindow(level Level, annotations []Annotation) *window { + w := new(window) + w.file = annotations[0].File + + // Calculate the range of the file we will be printing. This is given + // by every line that has a piece of diagnostic in it. To find this, we + // calculate the join of all of the spans in the window, and find the + // nearest \n runes in the text. + w.start = annotations[0].Start.Line + w.offsets[0] = annotations[0].Start.Offset + for _, snip := range annotations { + w.start = min(w.start, snip.Start.Line) + w.offsets[0] = min(w.offsets[0], snip.Start.Offset) + w.offsets[1] = max(w.offsets[1], snip.End.Offset) + } + // Now, find the newlines before and after the given ranges, respectively. + // This snaps the range to start immediately after a newline (or SOF) and + // end immediately before a newline (or EOF). + w.offsets[0] = strings.LastIndexByte(w.file.Text[:w.offsets[0]], '\n') + 1 // +1 gives the byte *after* the newline. + if end := strings.IndexByte(w.file.Text[w.offsets[1]:], '\n'); end != -1 { + w.offsets[1] += end + } else { + w.offsets[1] = len(w.file.Text) + } + + // Now, convert each span into an underline or multiline. + for _, snippet := range annotations { + if snippet.Start.Line != snippet.End.Line { + w.multilines = append(w.multilines, multiline{ + start: snippet.Start.Line, + end: snippet.End.Line, + startWidth: snippet.Start.Column, + endWidth: snippet.End.Column, + level: note, + message: snippet.Message, + }) + ml := &w.multilines[len(w.multilines)-1] + + if ml.startWidth == ml.endWidth { + ml.endWidth++ + } + + // Calculate whether this snippet starts on the first non-space rune of + // the line. + if snippet.Start.Offset != 0 { + firstLineStart := strings.LastIndexByte(w.file.Text[:snippet.Start.Offset-1], '\n') + if !strings.ContainsFunc( + w.file.Text[firstLineStart+1:snippet.Start.Offset-1], + func(r rune) bool { return !unicode.IsSpace(r) }, + ) { + ml.startWidth = 0 + } + } + + if snippet.Primary { + ml.level = level + } + continue + } + + w.underlines = append(w.underlines, underline{ + line: snippet.Start.Line, + start: snippet.Start.Column, + end: snippet.End.Column, + level: note, + message: snippet.Message, + }) + + ul := &w.underlines[len(w.underlines)-1] + if snippet.Primary { + ul.level = level + } + if ul.start == ul.end { + ul.end++ + } + + // Make sure no empty underlines exist. + if ul.Len() == 0 { + ul.start++ + } + } + + slices.SortFunc(w.underlines, cmpUnderlines) + return w +} + +func (w *window) Render(lineBarWidth int, c *color, out *strings.Builder) { + type lineInfo struct { + sidebar []*multiline + underlines []string + shouldEmit bool + } + + lines := strings.Split(w.file.Text[w.offsets[0]:w.offsets[1]], "\n") + // Populate ancillary info for each line. + info := make([]lineInfo, len(lines)) + + // First, lay out the multilines, and compute how wide the sidebar is. + for i := range w.multilines { + multi := &w.multilines[i] + // Find the smallest unused index by every line in the range. 
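// (Worked example: if the lines this multiline spans already use sidebar
// slots 0 and 2, bitset becomes 0b101, ^bitset has its lowest set bit at
// position 1, and bits.TrailingZeros selects slot 1 for this multiline.)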
+ var bitset uint + for i := multi.start; i <= multi.end; i++ { + for i, ml := range info[i-w.start].sidebar { + if ml != nil { + bitset |= 1 << i + } + } + } + idx := bits.TrailingZeros(^bitset) + + // Apply the index to every element of sidebar. + for i := multi.start; i <= multi.end; i++ { + line := &info[i-w.start].sidebar + for len(*line) < idx+1 { + *line = append(*line, nil) + } + (*line)[idx] = multi + } + + // Mark the start and end as must-emit. + info[multi.start-w.start].shouldEmit = true + info[multi.end-w.start].shouldEmit = true + } + var sidebarLen int + for _, info := range info { + sidebarLen = max(sidebarLen, len(info.sidebar)) + } + + // Next, we can render the underline parts. This aggregates all underlines + // for the same line into rendered chunks + parts := partition(w.underlines, func(a, b *underline) bool { return a.line != b.line }) + parts(func(_ int, part []underline) bool { + cur := &info[part[0].line-w.start] + cur.shouldEmit = true + + // Arrange for a "sidebar prefix" for this line. This is determined by any sidebars that are + // active on this line, even if they end on it. + sidebar := renderSidebar(sidebarLen, -1, -1, c, cur.sidebar) + + // Lay out the physical underlines in reverse order. This will cause longer lines to be + // laid out first, which will be overwritten by shorter ones. + // + // We use a slice instead of a strings.Builder so we can overwrite parts + // as we render different "layers". + var buf []byte + for i := len(part) - 1; i >= 0; i-- { + element := part[i] + if len(buf) < element.end { + newBuf := make([]byte, element.end) + copy(newBuf, buf) + buf = newBuf + } + + // Note that start/end are 1-indexed. + for j := element.start - 1; j < element.end-1; j++ { + buf[j] = byte(element.level) + } + } + + // Now, convert the buffer into a proper string. + var out strings.Builder + parts := partition(buf, func(a, b *byte) bool { return *a != *b }) + parts(func(_ int, line []byte) bool { + level := Level(line[0]) + if line[0] == 0 { + out.WriteString(c.reset) + } else { + out.WriteString(c.BoldForLevel(level)) + } + for range line { + switch level { + case 0: + out.WriteByte(' ') + case note: + out.WriteByte('-') + default: + out.WriteByte('^') + } + } + return true + }) + + // Next we need to find the message that goes inline with the underlines. This will be + // the message belonging to the rightmost underline. + var rightmost *underline + for i := range part { + ul := &part[i] + if rightmost == nil || ul.end > rightmost.end { + rightmost = ul + } + } + underlines := strings.TrimRight(out.String(), " ") + cur.underlines = []string{sidebar + underlines + " " + c.BoldForLevel(rightmost.level) + rightmost.message} + + // Now, do all the other messages, one per line. For each message, we also + // need to draw pipes (|) above each one to connect it to its underline. + // + // This is slightly complicated, because there are two layers: the pipes, and + // whatever message goes on the pipes. + var rest []*underline + for i := range part { + ul := &part[i] + if ul == rightmost || ul.message == "" { + continue + } + rest = append(rest, ul) + } + + for idx := range rest { + buf = buf[:0] // Clear the temp buffer. + + // First, lay out the pipes. Note that rest is not necessarily + // ordered from right to left, so we need to sort the pipes first. + // To deal with this, we make a copy of rest[idx:], sort it appropriately, + // and then lay things out. + // + // This is quadratic, but no one is going to put more than like. 
five snippets + // in a line, so it's fine. + restSorted := slices.Clone(rest[idx:]) + slices.SortFunc(restSorted, func(a, b *underline) int { + return a.start - b.start + }) + + var nonColorLen int + for _, ul := range restSorted { + col := ul.start - 1 + for nonColorLen < col { + buf = append(buf, ' ') + nonColorLen++ + } + + if nonColorLen == col { + // Two pipes may appear on the same column! + // This is why this is in a conditional. + buf = append(buf, c.BoldForLevel(ul.level)...) + buf = append(buf, '|') + nonColorLen++ + } + } + + // Spat in the one with all the pipes in it as-is. + cur.underlines = append(cur.underlines, strings.TrimRight(sidebar+string(buf), " ")) + + // Then, splat in the message. having two rows like this ensures that + // each message has one pipe directly above it. + if idx >= 0 { + ul := rest[idx] + + actualStart := ul.start - 1 + for _, other := range rest[idx:] { + if other.start <= ul.start { + actualStart += len(c.BoldForLevel(ul.level)) + } + } + for len(buf) < actualStart+len(ul.message)+1 { + buf = append(buf, ' ') + } + + // Make sure we don't crop *part* of an escape. To do this, we look for + // the last ESC in the region we're going to replace. If it is not + // followed by an m, we need to insert that many spaces into buf to avoid + // overwriting it. + writeTo := buf[actualStart:][:len(ul.message)] + lastEsc := bytes.LastIndexByte(writeTo, 033) + if lastEsc != -1 && !bytes.ContainsRune(writeTo[lastEsc:], 'm') { + // If we got here, it means we're going to crop an escape if + // we don't do something about it. + spaceNeeded := len(writeTo) - lastEsc + for i := 0; i < spaceNeeded; i++ { + buf = append(buf, 0) + } + copy(buf[actualStart+lastEsc+spaceNeeded:], buf[actualStart+lastEsc:]) + } + + copy(buf[actualStart:], ul.message) + } + cur.underlines = append(cur.underlines, strings.TrimRight(sidebar+string(buf), " ")) + } + + return true + }) + + //nolint:dupword + // Now that we've laid out the underlines, we can add the starts and ends of all + // of the multilines, which go after the underlines. + // + // The result is that a multiline will look like this: + // + // code + // ____^ + // | code code code + // \______________^ message + var line strings.Builder + for i := range info { + cur := &info[i] + prevStart := -1 + for j, ml := range cur.sidebar { + if ml == nil { + continue + } + + line.Reset() + var isStart bool + switch w.start + i { + case ml.start: + if ml.startWidth == 0 { + continue + } + + isStart = true + fallthrough + case ml.end: + // We need to be flush with the sidebar here, so we trim the trailing space. + sidebar := []byte(strings.TrimRight(renderSidebar(0, -1, prevStart, c, cur.sidebar[:j+1]), " ")) + + // We also need to erase the bars of any multis that are before this multi + // and start/end on the same line. + if !isStart { + for i, otherML := range cur.sidebar[:j+1] { + if otherML != nil && otherML.end == ml.end { + // We assume all the color codes have the same byte length. + codeLen := len(c.bBlue) + idx := i*(2+codeLen) + codeLen + if idx < len(sidebar) { + sidebar[idx] = ' ' + } + } + } + } + + // Delete the last pipe and replace it with a slash or space, depending. + // on orientation. + line.Write(sidebar[:len(sidebar)-1]) + if isStart { + line.WriteByte(' ') + } else { + line.WriteByte('\\') + } + + // Pad out to the gutter of the code block. 
+ remaining := sidebarLen - (j + 1) + padByRune(&line, remaining*2, '_') + + // Pad to right before we need to insert a ^ or - + if isStart { + padByRune(&line, ml.startWidth-1, '_') + } else { + padByRune(&line, ml.endWidth-1, '_') + } + + if ml.level == note { + line.WriteByte('-') + } else { + line.WriteByte('^') + } + if !isStart && ml.message != "" { + line.WriteByte(' ') + line.WriteString(ml.message) + } + cur.underlines = append(cur.underlines, line.String()) + } + + if isStart { + prevStart = j + } else { + prevStart = -1 + } + } + } + + // Make sure to emit any lines adjacent to another line we want to emit, so long as that + // line contains printable characters. + // + // We copy a set of all the lines we plan to emit before this transformation; + // otherwise, doing it in-place will cause every nonempty line after a must-emit line + // to be shown, which we don't want. + mustEmit := make(map[int]bool) + for i := range info { + if info[i].shouldEmit { + mustEmit[i] = true + } + } + for i := range info { + // At least two of the below conditions must be true for + // this line to be shown. Annoyingly, go does not have a conversion + // from bool to int... + var score int + if strings.IndexFunc(lines[i], unicode.IsGraphic) != -1 { + score++ + } + if mustEmit[i-1] { + score++ + } + if mustEmit[i+1] { + score++ + } + if score >= 2 { + info[i].shouldEmit = true + } + } + + lastEmit := w.start + for i, line := range lines { + cur := &info[i] + lineno := i + w.start + + if !cur.shouldEmit { + continue + } + + // If the last multi of the previous line starts on that line, make its + // pipe here a slash so that it connects properly. + slashAt := -1 + if i > 0 { + prevSidebar := info[i-1].sidebar + if len(prevSidebar) > 0 && + prevSidebar[len(prevSidebar)-1].start == lineno-1 && + prevSidebar[len(prevSidebar)-1].startWidth > 0 { + slashAt = len(prevSidebar) - 1 + } + } + sidebar := renderSidebar(sidebarLen, lineno, slashAt, c, cur.sidebar) + + if i > 0 && !info[i-1].shouldEmit { + // Generate a visual break if this is right after a real line. + out.WriteByte('\n') + out.WriteString(c.nBlue) + padBy(out, lineBarWidth-2) + out.WriteString("... ") + + // Generate a sidebar as before but this time we want to look at the + // last line that was actually emitted. + slashAt := -1 + prevSidebar := info[lastEmit-w.start].sidebar + if len(prevSidebar) > 0 && + prevSidebar[len(prevSidebar)-1].start == lastEmit && + prevSidebar[len(prevSidebar)-1].startWidth > 0 { + slashAt = len(prevSidebar) - 1 + } + + out.WriteString(renderSidebar(sidebarLen, lineno, slashAt, c, cur.sidebar)) + } + + // Ok, we are definitely printing this line out. + fmt.Fprintf(out, "\n%s%*d | %s%s", c.nBlue, lineBarWidth, lineno, sidebar, c.reset) + lastEmit = lineno + + // Replace tabstops with spaces. + var column int + // We can't just use StringWidth, because that doesn't respect tabstops + // correctly. + for { + nextTab := strings.IndexByte(line, '\t') + if nextTab != -1 { + column += uniseg.StringWidth(line[:nextTab]) + out.WriteString(line[:nextTab]) + + tab := TabstopWidth - (column % TabstopWidth) + column += tab + padBy(out, tab) + + line = line[nextTab+1:] + } else { + out.WriteString(line) + break + } + } + + // If this happens to be an annotated line, this is when it gets annotated. 
+ for _, line := range cur.underlines { + out.WriteByte('\n') + out.WriteString(c.nBlue) + padBy(out, lineBarWidth) + out.WriteString(" | ") + out.WriteString(line) + } + } +} + +type underline struct { + line int + start, end int + level Level + message string +} + +func (u underline) Len() int { + return u.end - u.start +} + +func cmpUnderlines(a, b underline) int { + if diff := a.line - b.line; diff != 0 { + return diff + } + if diff := a.level - b.level; diff != 0 { + return int(diff) + } + if diff := a.Len() - b.Len(); diff != 0 { + return diff + } + return a.start - b.start +} + +type multiline struct { + start, end int + startWidth, endWidth int + level Level + message string +} + +func renderSidebar(bars, lineno, slashAt int, c *color, multis []*multiline) string { + var sidebar strings.Builder + for i, ml := range multis { + if ml == nil { + sidebar.WriteString(" ") + continue + } + + sidebar.WriteString(c.BoldForLevel(ml.level)) + + switch { + case slashAt == i: + sidebar.WriteByte('/') + case lineno != ml.start: + sidebar.WriteByte('|') + case ml.startWidth == 0: + sidebar.WriteByte('/') + default: + sidebar.WriteByte(' ') + } + sidebar.WriteByte(' ') + } + for sidebar.Len() < bars*2 { + sidebar.WriteByte(' ') + } + return sidebar.String() +} + +// partition returns an iterator of subslices of s such that each yielded +// slice is delimited according to delimit. Also yields the starting index of +// the subslice. +// +// In other words, suppose delimit is !=. Then, the slice [a a a b c c] is yielded +// as the subslices [a a a], [b], and [c c c]. +// +// Will never yield an empty slice. +// +//nolint:dupword +func partition[T any](s []T, delimit func(a, b *T) bool) func(func(int, []T) bool) { + return func(yield func(int, []T) bool) { + var start int + for i := 1; i < len(s); i++ { + if delimit(&s[i-1], &s[i]) { + if !yield(start, s[start:i]) { + break + } + start = i + } + } + rest := s[start:] + if len(rest) > 0 { + yield(start, rest) + } + } +} + +func padBy(out *strings.Builder, spaces int) { + for i := 0; i < spaces; i++ { + out.WriteByte(' ') + } +} + +func padByRune(out *strings.Builder, spaces int, r rune) { + for i := 0; i < spaces; i++ { + out.WriteRune(r) + } +} diff --git a/experimental/report/renderer_test.go b/experimental/report/renderer_test.go new file mode 100644 index 00000000..3c575370 --- /dev/null +++ b/experimental/report/renderer_test.go @@ -0,0 +1,132 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package report_test + +import ( + "encoding/base64" + "encoding/json" + "regexp" + "strings" + "testing" + + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/proto" + "gopkg.in/yaml.v3" + + "github.com/bufbuild/protocompile/experimental/report" + "github.com/bufbuild/protocompile/internal/golden" +) + +var ansiEscapePat = regexp.MustCompile("\033\\[([\\d;]*)m") + +// ansiToMarkup converts ANSI escapes we care about in `text` into markup that is hopefully +// easier for humans to parse. +func ansiToMarkup(text string) string { + return ansiEscapePat.ReplaceAllStringFunc(text, func(needle string) string { + // We only handle a small subset of things we know. + code := ansiEscapePat.FindStringSubmatch(needle)[1] + + colors := []string{"blk", "red", "grn", "ylw", "blu", "mta", "cyn", "wht"} + place := []string{"", "", "bg", "", "+", "bg.+"} + + if code == "0" { + code = "reset" + } else { + parts := strings.SplitN(code, ";", 2) + var name strings.Builder + if parts[0] == "1" { + name.WriteString("b.") + } + name.WriteString(place[(parts[1][0]-'0')/2]) + name.WriteString(colors[parts[1][1]-'0']) + code = name.String() + } + + return "⟨" + code + "⟩" + }) +} + +func TestRender(t *testing.T) { + t.Parallel() + + corpus := golden.Corpus{ + Root: "testdata", + Refresh: "PROTOCOMPILE_REFRESH", + Extension: "yaml", + Outputs: []golden.Output{ + {Extension: "simple.txt"}, + {Extension: "fancy.txt"}, + {Extension: "color.txt"}, + }, + } + + corpus.Run(t, func(t *testing.T, path, text string, outputs []string) { + r := new(report.Report) + err := r.AppendFromProto(func(m proto.Message) error { + // Convert YAML -> JSON. We don't use protoyaml here because that depends + // on GRPC and that depends on the universe. + bag := map[string]any{} + if err := yaml.Unmarshal([]byte(text), bag); err != nil { + return err + } + + // Convert files.text into base64 to appease protojson. + if files, ok := bag["files"]; ok { + for _, file := range files.([]any) { + if file, ok := file.(map[string]any); ok { + if text, ok := file["text"].(string); ok { + file["text"] = base64.RawStdEncoding.EncodeToString([]byte(text)) + } + } + } + } + + json, err := json.Marshal(bag) + if err != nil { + return err + } + + return protojson.Unmarshal(json, m) + }) + if err != nil { + t.Fatalf("failed to parse input %q: %v", path, err) + } + + text, _ = report.Renderer{ + Austere: true, + ShowRemarks: true, + ShowDebug: true, + }.Render(r) + outputs[0] = text + if text != "" { + text += "\n" + } + + text, _ = report.Renderer{ + Austere: false, + ShowRemarks: true, + ShowDebug: true, + }.Render(r) + outputs[1] = text + + text, _ = report.Renderer{ + Colorize: true, + Austere: false, + ShowRemarks: true, + ShowDebug: true, + }.Render(r) + outputs[2] = ansiToMarkup(text) + }) +} diff --git a/experimental/report/report.go b/experimental/report/report.go new file mode 100644 index 00000000..413051cb --- /dev/null +++ b/experimental/report/report.go @@ -0,0 +1,475 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package report + +import ( + "errors" + "fmt" + "runtime" + "slices" + "strings" + + "google.golang.org/protobuf/proto" + + compilerpb "github.com/bufbuild/protocompile/internal/gen/buf/compiler/v1" +) + +const ( + // Red. Indicates a semantic constraint violation. + Error Level = 1 + iota + // Yellow. Indicates something that probably should not be ignored. + Warning + // Cyan. This is the diagnostics version of "info". + Remark + + note // Used internally within the diagnostic renderer. +) + +// Level represents the severity of a diagnostic message. +type Level int8 + +// Diagnose is an error that can be rendered as a diagnostic. +type Diagnose interface { + error + + // Diagnose writes out this error to the given diagnostic. + // + // This function should not set Level nor Err; those are set by the + // diagnostics framework. + Diagnose(*Diagnostic) +} + +// Diagnostic is a type of error that can be rendered as a rich diagnostic. +// +// Not all Diagnostics are "errors", even though Diagnostic does embed error; +// some represent warnings, or perhaps debugging remarks. +type Diagnostic struct { + // The error that prompted this diagnostic. Its Error() return is used + // as the diagnostic message. + Err error + + // The kind of diagnostic this is, which affects how and whether it is shown + // to users. + Level Level + + // Stage is an opaque identifier for the "stage" that a diagnostic occurred in. + // See [Report.Sort]. + Stage int + + // The file this diagnostic occurs in, if it has no associated Annotations. This + // is used for errors like "file too big" that cannot be given a snippet. + InFile string + + // A list of annotated source code spans in the diagnostic. + Annotations []Annotation + + // Notes and help messages to include at the end of the diagnostic, after the + // Annotations. + Notes, Help, Debug []string +} + +// Annotation is an annotated source code snippet within a [Diagnostic]. +type Annotation struct { + // A message to show under this snippet. May be empty. + Message string + + // Whether this is a "primary" snippet, which is used for deciding whether or not + // to mark the snippet with the same color as the overall diagnostic. + Primary bool + + // The file this snippet is from. Note that Annotations with the same file name + // are treated as being part of the same file, regardless of that file's contents. + File File + // Start and end positions for this snippet, within the above file. + Start, End Location +} + +// Primary returns this diagnostic's primary snippet, if it has one. +// +// If it doesn't have one, it returns a dummy annotation referring to InFile. +func (d *Diagnostic) Primary() Annotation { + for _, annotation := range d.Annotations { + if annotation.Primary { + return annotation + } + } + + return Annotation{ + File: File{Path: d.InFile}, + Primary: true, + } +} + +// With applies the given options to this diagnostic. +func (d *Diagnostic) With(options ...DiagnosticOption) { + for _, option := range options { + option(d) + } +} + +// DiagnosticOption is an option that can be applied to a [Diagnostic]. +type DiagnosticOption func(*Diagnostic) + +// InFile returns a DiagnosticOption that causes a diagnostic without a primary +// span to mention the given file. 
+func InFile(path string) DiagnosticOption {
+	return func(d *Diagnostic) { d.InFile = path }
+}
+
+// Snippet returns a DiagnosticOption that adds a new snippet to a diagnostic.
+//
+// The first annotation added is the "primary" annotation, and will be rendered
+// differently from the others.
+func Snippet[Spanner interface{ Span() S }, S Span](at Spanner) DiagnosticOption {
+	return Snippetf(at, "")
+}
+
+// Snippetf returns a DiagnosticOption that adds a new snippet to a diagnostic with the given message.
+//
+// The first annotation added is the "primary" annotation, and will be rendered
+// differently from the others.
+func Snippetf[Spanner interface{ Span() S }, S Span](at Spanner, format string, args ...any) DiagnosticOption {
+	return SnippetAtf(at.Span(), format, args...)
+}
+
+// SnippetAt is like [Snippet], but takes a span rather than something with a Span() method.
+func SnippetAt(span Span) DiagnosticOption {
+	return SnippetAtf(span, "")
+}
+
+// SnippetAtf is like [Snippetf], but takes a span rather than something with a Span() method.
+func SnippetAtf(span Span, format string, args ...any) DiagnosticOption {
+	// This is hoisted out to improve stack traces when something goes awry in the
+	// argument to With(). By hoisting, it correctly blames the right invocation of Snippet().
+	annotation := Annotation{
+		File:    span.File(),
+		Start:   span.Start(),
+		End:     span.End(),
+		Message: fmt.Sprintf(format, args...),
+	}
+	return func(d *Diagnostic) {
+		annotation.Primary = len(d.Annotations) == 0
+		d.Annotations = append(d.Annotations, annotation)
+	}
+}
+
+// Note returns a DiagnosticOption that provides the user with context about the
+// diagnostic, after the annotations.
+//
+// The arguments are stringified with [fmt.Sprint].
+func Note(args ...any) DiagnosticOption {
+	return func(d *Diagnostic) {
+		d.Notes = append(d.Notes, fmt.Sprint(args...))
+	}
+}
+
+// Notef is like [Note], but it calls [fmt.Sprintf] internally for you.
+func Notef(format string, args ...any) DiagnosticOption {
+	return func(d *Diagnostic) {
+		d.Notes = append(d.Notes, fmt.Sprintf(format, args...))
+	}
+}
+
+// Help returns a DiagnosticOption that provides the user with a helpful prose
+// suggestion for resolving the diagnostic.
+//
+// The arguments are stringified with [fmt.Sprint].
+func Help(args ...any) DiagnosticOption {
+	return func(d *Diagnostic) {
+		d.Help = append(d.Help, fmt.Sprint(args...))
+	}
+}
+
+// Helpf is like [Help], but it calls [fmt.Sprintf] internally for you.
+func Helpf(format string, args ...any) DiagnosticOption {
+	return func(d *Diagnostic) {
+		d.Help = append(d.Help, fmt.Sprintf(format, args...))
+	}
+}
+
+// Debug returns a DiagnosticOption that appends debugging information to a diagnostic;
+// this information is not intended to be shown to normal users.
+//
+// The arguments are stringified with [fmt.Sprint].
+func Debug(args ...any) DiagnosticOption {
+	return func(d *Diagnostic) {
+		d.Debug = append(d.Debug, fmt.Sprint(args...))
+	}
+}
+
+// Debugf is like [Debug], but it calls [fmt.Sprintf] internally for you.
+func Debugf(format string, args ...any) DiagnosticOption {
+	return func(d *Diagnostic) {
+		d.Debug = append(d.Debug, fmt.Sprintf(format, args...))
+	}
+}
+
+// Report is a collection of diagnostics.
+//
+// Report is not thread-safe (in the sense that distinct goroutines should not
+// all write to Report at the same time). Instead, the recommendation is to create
+// multiple reports and then merge them, using [Report.Sort] to canonicalize the result.
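A minimal sketch of the pattern recommended in doc.go: a dedicated error type implementing Diagnose in terms of the options defined above. The unknownType name and its fields are illustrative, not part of this diff:

	// unknownType reports a reference to a type name that is not defined.
	type unknownType struct {
		name string      // the offending type name
		span report.Span // where it was referenced
	}

	func (e *unknownType) Error() string {
		return fmt.Sprintf("`%s` is not a built-in Protobuf type", e.name)
	}

	func (e *unknownType) Diagnose(d *report.Diagnostic) {
		d.With(
			report.SnippetAt(e.span),
			report.Helpf("did you mean to import a file that defines `%s`?", e.name),
		)
	}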
+type Report struct { + // The actual diagnostics on this report. Generally, you'll want to use one of + // the helpers like [Report.Error] instead of appending directly. + Diagnostics []Diagnostic + + // The stage to apply to any new diagnostics created with this report. + // + // Diagnostics with the same stage will sort together. See [Report.Sort]. + Stage int + + // When greater than zero, this will capture debugging information at the + // site of each call to Error() etc. This will make diagnostic construction + // orders of magnitude slower; it is intended to help tool writers to debug + // their diagnostics. + // + // Higher values mean more debugging information. What debugging information + // is actually provided is subject to change. + Tracing int +} + +// Error pushes an error diagnostic onto this report. +func (r *Report) Error(err Diagnose) { + err.Diagnose(r.push(1, err, Error)) +} + +// Warn pushes a warning diagnostic onto this report. +func (r *Report) Warn(err Diagnose) { + err.Diagnose(r.push(1, err, Warning)) +} + +// Remark pushes a remark diagnostic onto this report. +func (r *Report) Remark(err Diagnose) { + err.Diagnose(r.push(1, err, Remark)) +} + +// Errorf creates a new error diagnostic with an unspecified error type; analogous to +// [fmt.Errorf]. +func (r *Report) Errorf(format string, args ...any) *Diagnostic { + return r.push(1, fmt.Errorf(format, args...), Error) +} + +// Warnf creates a new warning diagnostic with an unspecified error type; analogous to +// [fmt.Errorf]. +func (r *Report) Warnf(format string, args ...any) *Diagnostic { + return r.push(1, fmt.Errorf(format, args...), Warning) +} + +// Remarkf creates a new remark diagnostic with an unspecified error type; analogous to +// [fmt.Errorf]. +func (r *Report) Remarkf(format string, args ...any) *Diagnostic { + return r.push(1, fmt.Errorf(format, args...), Remark) +} + +// Sort canonicalizes this report's diagnostic order according to an specific +// ordering criteria. Diagnostics are sorted by, in order; +// +// File name of primary span, stage, start offset of primary snippet, end offset +// of primary snippet, content of error message. +// +// Where diagnostics have no primary span, the file is treated as empty and the +// offsets are treated as zero. +// +// These criteria ensure that diagnostics for the same file go together, +// diagnostics for the same stage (lex, parse, etc) go together, and they are +// otherwise ordered by where they occur in the file. +func (r *Report) Sort() { + slices.SortFunc(r.Diagnostics, func(a, b Diagnostic) int { + aPrime := a.Primary() + bPrime := b.Primary() + + if diff := strings.Compare(aPrime.File.Path, bPrime.File.Path); diff != 0 { + return diff + } + + if diff := a.Stage - b.Stage; diff != 0 { + return diff + } + + if diff := aPrime.Start.Offset - bPrime.Start.Offset; diff != 0 { + return diff + } + + if diff := aPrime.End.Offset - bPrime.End.Offset; diff != 0 { + return diff + } + + return strings.Compare(a.Err.Error(), b.Err.Error()) + }) +} + +// ToProto converts this report into a Protobuf message for serialization. +// +// This operation is lossy: only the Diagnostics slice is serialized. It also discards +// concrete types of Diagnostic.Err, replacing them with opaque [errors.New] values +// on deserialization. +// +// It will also deduplicate [File] values based on their paths, paying no attention to +// their contents. 
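A sketch of how the pieces above fit together; unknownType is the illustrative Diagnose implementation sketched earlier, and typeSpan and nameSpan stand in for real [Span] values:

	var r report.Report
	r.Error(&unknownType{name: "size_t", span: typeSpan}) // Error fills in Level and Err.
	r.Warnf("these are pretty bad names").With(
		report.SnippetAtf(nameSpan, "could be better"),
	)
	r.Sort() // canonicalize ordering before rendering

	text, haveErrors := report.Renderer{ShowRemarks: true}.Render(&r)
	fmt.Print(text)
	_ = haveErrors // e.g. use this to decide the process exit code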
+func (r *Report) ToProto() proto.Message { + proto := new(compilerpb.Report) + + fileToIndex := map[string]uint32{} + for _, d := range r.Diagnostics { + dProto := &compilerpb.Diagnostic{ + Message: d.Err.Error(), + Level: compilerpb.Diagnostic_Level(d.Level), + InFile: d.InFile, + Notes: d.Notes, + Help: d.Help, + Debug: d.Debug, + } + + for _, snip := range d.Annotations { + file, ok := fileToIndex[snip.File.Path] + if !ok { + file = uint32(len(proto.Files)) + fileToIndex[snip.File.Path] = file + + proto.Files = append(proto.Files, &compilerpb.Report_File{ + Path: snip.File.Path, + Text: []byte(snip.File.Text), + }) + } + + dProto.Annotations = append(dProto.Annotations, &compilerpb.Diagnostic_Annotation{ + File: file, + Start: uint32(snip.Start.Offset), + End: uint32(snip.End.Offset), + Message: snip.Message, + Primary: snip.Primary, + }) + } + + proto.Diagnostics = append(proto.Diagnostics, dProto) + } + + return proto +} + +// FromProto appends diagnostics from a Protobuf message to this report. +// +// deserialize will be called with an empty message that should be deserialized +// onto, which this function will then convert into [Diagnostic]s to populate the +// report with. +func (r *Report) AppendFromProto(deserialize func(proto.Message) error) error { + proto := new(compilerpb.Report) + if err := deserialize(proto); err != nil { + return err + } + + files := make([]*IndexedFile, len(proto.Files)) + for i, fProto := range proto.Files { + files[i] = NewIndexedFile(File{ + Path: fProto.Path, + Text: string(fProto.Text), + }) + } + + for i, dProto := range proto.Diagnostics { + if dProto.Message == "" { + return fmt.Errorf("protocompile/report: missing message for diagnostic[%d]", i) + } + level := Level(dProto.Level) + switch level { + case Error, Warning, Remark: + default: + return fmt.Errorf("protocompile/report: invalid value for Diagnostic.level: %d", int(level)) + } + + d := Diagnostic{ + Err: errors.New(dProto.Message), + Level: level, + InFile: dProto.InFile, + Notes: dProto.Notes, + Help: dProto.Help, + Debug: dProto.Debug, + } + + var havePrimary bool + for j, snip := range dProto.Annotations { + if int(snip.File) >= len(proto.Files) { + return fmt.Errorf( + "protocompile/report: invalid file index for diagnostic[%d].annotation[%d]: %d", + i, j, snip.File, + ) + } + + file := files[snip.File] + if int(snip.Start) >= len(file.File().Text) || + int(snip.End) > len(file.File().Text) || + snip.Start > snip.End { + return fmt.Errorf( + "protocompile/report: out-of-bounds span for diagnostic[%d].annotation[%d]: [%d:%d]", + i, j, snip.Start, snip.End, + ) + } + + d.Annotations = append(d.Annotations, Annotation{ + File: file.File(), + Start: file.Search(int(snip.Start)), + End: file.Search(int(snip.End)), + Message: snip.Message, + Primary: snip.Primary, + }) + havePrimary = havePrimary || snip.Primary + } + + if !havePrimary && len(d.Annotations) > 0 { + d.Annotations[0].Primary = true + } + + r.Diagnostics = append(r.Diagnostics, d) + } + + return nil +} + +// push is the core "make me a diagnostic" function. +// +//nolint:unparam +func (r *Report) push(skip int, err error, level Level) *Diagnostic { + r.Diagnostics = append(r.Diagnostics, Diagnostic{ + Err: err, + Level: level, + Stage: r.Stage, + }) + d := &(r.Diagnostics)[len(r.Diagnostics)-1] + + // If debugging is on, capture a stack trace. + if r.Tracing > 0 { + // Unwind the stack to find program counter information. + pc := make([]uintptr, 64) + pc = pc[:runtime.Callers(skip+2, pc)] + + // Fill trace with the result. 
+ var ( + zero runtime.Frame + buf strings.Builder + ) + frames := runtime.CallersFrames(pc) + for i := 0; i < r.Tracing; i++ { + frame, more := frames.Next() + if frame == zero || !more { + break + } + fmt.Fprintf(&buf, "at %s\n %s:%d\n", frame.Function, frame.File, frame.Line) + } + d.With(Debug(buf.String())) + } + + return d +} diff --git a/experimental/report/span.go b/experimental/report/span.go new file mode 100644 index 00000000..8ab173db --- /dev/null +++ b/experimental/report/span.go @@ -0,0 +1,197 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package report + +import ( + "slices" + "strings" + "sync" + + "github.com/rivo/uniseg" +) + +// The size we render all tabstops as. +const TabstopWidth int = 4 + +// Span is any type that can be used to generate source code information for a diagnostic. +type Span interface { + File() File + Start() Location + End() Location +} + +// File is a source code file involved in a diagnostic. +type File struct { + // The filesystem path for this string. It doesn't need to be a real path, but + // it will be used to deduplicate spans according to their file. + Path string + + // The complete text of the file. + Text string +} + +// Location is a user-displayable location within a source code file. +type Location struct { + // The byte offset for this location. + Offset int + + // The line and column for this location, 1-indexed. + // + // Note that Column is not Offset with the length of all + // previous lines subtracted off; it takes into account the + // Unicode width. The rune A is one column wide, the rune + // 貓 is two columns wide, and the multi-rune emoji presentation + // sequence 🐈‍⬛ is also two columns wide. + // + // Because these are 1-indexed, a zero Line can be used as a sentinel. + Line, Column int + + // The ostensible UTF-16 codepoint offset from the start of the line + // for this location. This exists for the benefit of LSP + // implementations. + UTF16 int +} + +// IndexedFile is an index of line information from a [File], which permits +// O(log n) calculation of [Location]s from offsets. +type IndexedFile struct { + file File + + once sync.Once + // A prefix sum of the line lengths of text. Given a byte offset, it is possible + // to recover which line that offset is on by performing a binary search on this + // list. + // + // Alternatively, this slice can be interpreted as the index after each \n in the + // original file. + lines []int + // Similar to the above, but instead using the length of each line in code units + // if it was transcoded to UTF-16. This is required for compatibility with LSP. + utf16Lines []int +} + +// NewIndexedFile constructs a line index for the given text. This is O(n) in the size +// of the text. +func NewIndexedFile(file File) *IndexedFile { + return &IndexedFile{file: file} +} + +// File returns the file that this index indexes. +func (i *IndexedFile) File() File { + return i.file +} + +// Span generates a span using this index. 
+// +// This is mostly intended for convenience; generally speaking, users of package report +// will want to implement their own [Span] types that use a compressed representation, +// and delay computation of line and column information as late as possible. +func (i *IndexedFile) NewSpan(start, end int) Span { + return naiveSpan{ + file: i.File(), + start: i.Search(start), + end: i.Search(end), + } +} + +// Search searches this index to build full Location information for the given byte +// offset. +func (i *IndexedFile) Search(offset int) Location { + // Compute the prefix sum on-demand. + i.once.Do(func() { + var next, next16 int + + // We add 1 to the return value of IndexByte because we want to work + // with the index immediately *after* the newline byte. + text := i.file.Text + for { + newline := strings.IndexByte(text, '\n') + 1 + if newline == 0 { + break + } + + line := text[:newline] + text = text[newline:] + + i.lines = append(i.lines, next) + next += newline + + // Calculate the length of `line` in UTF-16 code units. + var utf16Len int + for _, r := range line { + utf16Len += utf16RuneLen(r) + } + + i.utf16Lines = append(i.utf16Lines, next16) + next16 += utf16Len + } + + i.lines = append(i.lines, next) + i.utf16Lines = append(i.utf16Lines, next16) + }) + + // Find the smallest index in c.liznes such that lines[line] <= offset. + line, exact := slices.BinarySearch(i.lines, offset) + if !exact { + line-- + } + + // Calculate the column. + chunk := i.file.Text[i.lines[line]:offset] + var column int + // We can't just use StringWidth, because that doesn't respect tabstops + // correctly. + for { + nextTab := strings.IndexByte(chunk, '\t') + if nextTab != -1 { + column += uniseg.StringWidth(chunk[:nextTab]) + column += TabstopWidth - (column % TabstopWidth) + chunk = chunk[nextTab+1:] + } else { + column += uniseg.StringWidth(chunk) + break + } + } + + // Calculate the UTF-16 offset of the offset within its line. + var utf16Col int + for _, r := range chunk { + utf16Col += utf16RuneLen(r) + } + + return Location{ + Offset: offset, + Line: line + 1, + Column: column + 1, + UTF16: utf16Col, + } +} + +func utf16RuneLen(r rune) int { + if r > 0xffff { + return 2 + } + return 1 +} + +type naiveSpan struct { + file File + start, end Location +} + +func (s naiveSpan) File() File { return s.file } +func (s naiveSpan) Start() Location { return s.start } +func (s naiveSpan) End() Location { return s.end } +func (s naiveSpan) Span() Span { return s } diff --git a/experimental/report/testdata/i18n.yaml b/experimental/report/testdata/i18n.yaml new file mode 100644 index 00000000..01eebed0 --- /dev/null +++ b/experimental/report/testdata/i18n.yaml @@ -0,0 +1,60 @@ +# Copyright 2020-2024 Buf Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# buf.compiler.v1.Report + +files: + - path: "foo.proto" + text: | + syntax = "proto4" + + package abc.xyz; + + message 🐈‍⬛ { + string 黑猫 = 1; + string القطة السوداء = 2; + } + - path: "bar.proto" + text: | + import "חתול שחור.proto"; + +diagnostics: + - message: "emoji, CJK, bidi" + level: LEVEL_ERROR + annotations: + - file: 0 + start: 45 + end: 55 + - message: "note: some surfaces render CJK as sub-two-column" + file: 0 + start: 67 + end: 73 + - message: "bidi works if it's quoted, at least" + file: 1 + start: 8 + end: 31 + + # NOTE: This test does not produce correct output because it requires + # bidi handling. Its primary purpose is to show we get it absolutely + # wrong. Rust *also* gets this completely wrong, so we're not gonna bother + # yet, either. https://godbolt.org/z/Eb8eo3fW9 + # + # That said, here are detailed instructions on how to go about fixing + # this: https://github.com/rust-lang/rust/issues/113363 + - message: "bidi (Arabic, Hebrew, Farsi, etc) is broken in some contexts" + level: LEVEL_ERROR + annotations: + - file: 0 + start: 88 + end: 103 \ No newline at end of file diff --git a/experimental/report/testdata/i18n.yaml.color.txt b/experimental/report/testdata/i18n.yaml.color.txt new file mode 100755 index 00000000..a2186d2b --- /dev/null +++ b/experimental/report/testdata/i18n.yaml.color.txt @@ -0,0 +1,20 @@ +⟨b.red⟩error: emoji, CJK, bidi⟨reset⟩ +⟨blu⟩ --> foo.proto:5:9 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨reset⟩message 🐈‍⬛ { +⟨blu⟩ | ⟨reset⟩ ⟨b.red⟩^^⟨reset⟩ ⟨b.red⟩ +⟨blu⟩ 6 | ⟨reset⟩ string 黑猫 = 1; +⟨blu⟩ | ⟨reset⟩ ⟨b.blu⟩----⟨reset⟩ ⟨b.blu⟩note: some surfaces render CJK as sub-two-column +⟨blu⟩ ::: bar.proto:1:9 +⟨blu⟩ | +⟨blu⟩ 1 | ⟨reset⟩import "חתול שחור.proto"; +⟨blu⟩ | ⟨reset⟩ ⟨b.blu⟩---------------⟨reset⟩ ⟨b.blu⟩bidi works if it's quoted, at least⟨reset⟩ + +⟨b.red⟩error: bidi (Arabic, Hebrew, Farsi, etc) is broken in some contexts⟨reset⟩ +⟨blu⟩ --> foo.proto:7:10 +⟨blu⟩ | +⟨blu⟩ 7 | ⟨reset⟩ string القطة السوداء = 2; +⟨blu⟩ | ⟨reset⟩ ⟨b.red⟩^^^^^^^^⟨reset⟩ ⟨b.red⟩⟨reset⟩ + +⟨b.red⟩encountered 2 errors⟨reset⟩ +⟨reset⟩ \ No newline at end of file diff --git a/experimental/report/testdata/i18n.yaml.fancy.txt b/experimental/report/testdata/i18n.yaml.fancy.txt new file mode 100755 index 00000000..54ba7256 --- /dev/null +++ b/experimental/report/testdata/i18n.yaml.fancy.txt @@ -0,0 +1,19 @@ +error: emoji, CJK, bidi + --> foo.proto:5:9 + | + 5 | message 🐈‍⬛ { + | ^^ + 6 | string 黑猫 = 1; + | ---- note: some surfaces render CJK as sub-two-column + ::: bar.proto:1:9 + | + 1 | import "חתול שחור.proto"; + | --------------- bidi works if it's quoted, at least + +error: bidi (Arabic, Hebrew, Farsi, etc) is broken in some contexts + --> foo.proto:7:10 + | + 7 | string القطة السوداء = 2; + | ^^^^^^^^ + +encountered 2 errors diff --git a/experimental/report/testdata/i18n.yaml.simple.txt b/experimental/report/testdata/i18n.yaml.simple.txt new file mode 100755 index 00000000..9a4a39fc --- /dev/null +++ b/experimental/report/testdata/i18n.yaml.simple.txt @@ -0,0 +1,2 @@ +error: foo.proto:5:9: emoji, CJK, bidi +error: foo.proto:7:10: bidi (Arabic, Hebrew, Farsi, etc) is broken in some contexts diff --git a/experimental/report/testdata/multi-file.yaml b/experimental/report/testdata/multi-file.yaml new file mode 100644 index 00000000..d5629c0e --- /dev/null +++ b/experimental/report/testdata/multi-file.yaml @@ -0,0 +1,68 @@ +# Copyright 2020-2024 Buf Technologies, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# buf.compiler.v1.Report + +files: + - path: "foo.proto" + text: | + syntax = "proto4" + + package abc.xyz; + + message Blah { + required size_t x = 0; + } + - path: "bar.proto" + text: | + syntax = "proto4" + + package abc.xyz2; + + message Foo { + required int32 y = 0; + } + +diagnostics: + - message: two files + level: LEVEL_ERROR + annotations: + - message: foo + file: 0 + start: 27 + end: 34 + - message: bar + file: 0 + start: 45 + end: 49 + - message: baz + file: 1 + start: 27 + end: 34 + + - message: three files + level: LEVEL_ERROR + annotations: + - message: foo + file: 0 + start: 27 + end: 34 + - message: baz + file: 1 + start: 27 + end: 34 + - message: bar + file: 0 + start: 45 + end: 49 diff --git a/experimental/report/testdata/multi-file.yaml.color.txt b/experimental/report/testdata/multi-file.yaml.color.txt new file mode 100755 index 00000000..98a919ee --- /dev/null +++ b/experimental/report/testdata/multi-file.yaml.color.txt @@ -0,0 +1,29 @@ +⟨b.red⟩error: two files⟨reset⟩ +⟨blu⟩ --> foo.proto:3:9 +⟨blu⟩ | +⟨blu⟩ 3 | ⟨reset⟩package abc.xyz; +⟨blu⟩ | ⟨reset⟩ ⟨b.red⟩^^^^^^^⟨reset⟩ ⟨b.red⟩foo +⟨blu⟩ 4 | ⟨reset⟩ +⟨blu⟩ 5 | ⟨reset⟩message Blah { +⟨blu⟩ | ⟨reset⟩ ⟨b.blu⟩----⟨reset⟩ ⟨b.blu⟩bar +⟨blu⟩ ::: bar.proto:3:9 +⟨blu⟩ | +⟨blu⟩ 3 | ⟨reset⟩package abc.xyz2; +⟨blu⟩ | ⟨reset⟩ ⟨b.blu⟩-------⟨reset⟩ ⟨b.blu⟩baz⟨reset⟩ + +⟨b.red⟩error: three files⟨reset⟩ +⟨blu⟩ --> foo.proto:3:9 +⟨blu⟩ | +⟨blu⟩ 3 | ⟨reset⟩package abc.xyz; +⟨blu⟩ | ⟨reset⟩ ⟨b.red⟩^^^^^^^⟨reset⟩ ⟨b.red⟩foo +⟨blu⟩ ::: bar.proto:3:9 +⟨blu⟩ | +⟨blu⟩ 3 | ⟨reset⟩package abc.xyz2; +⟨blu⟩ | ⟨reset⟩ ⟨b.blu⟩-------⟨reset⟩ ⟨b.blu⟩baz +⟨blu⟩ ::: foo.proto:5:9 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨reset⟩message Blah { +⟨blu⟩ | ⟨reset⟩ ⟨b.blu⟩----⟨reset⟩ ⟨b.blu⟩bar⟨reset⟩ + +⟨b.red⟩encountered 2 errors⟨reset⟩ +⟨reset⟩ \ No newline at end of file diff --git a/experimental/report/testdata/multi-file.yaml.fancy.txt b/experimental/report/testdata/multi-file.yaml.fancy.txt new file mode 100755 index 00000000..0f49e78e --- /dev/null +++ b/experimental/report/testdata/multi-file.yaml.fancy.txt @@ -0,0 +1,28 @@ +error: two files + --> foo.proto:3:9 + | + 3 | package abc.xyz; + | ^^^^^^^ foo + 4 | + 5 | message Blah { + | ---- bar + ::: bar.proto:3:9 + | + 3 | package abc.xyz2; + | ------- baz + +error: three files + --> foo.proto:3:9 + | + 3 | package abc.xyz; + | ^^^^^^^ foo + ::: bar.proto:3:9 + | + 3 | package abc.xyz2; + | ------- baz + ::: foo.proto:5:9 + | + 5 | message Blah { + | ---- bar + +encountered 2 errors diff --git a/experimental/report/testdata/multi-file.yaml.simple.txt b/experimental/report/testdata/multi-file.yaml.simple.txt new file mode 100755 index 00000000..c0b1e01f --- /dev/null +++ b/experimental/report/testdata/multi-file.yaml.simple.txt @@ -0,0 +1,2 @@ +error: foo.proto:3:9: two files +error: foo.proto:3:9: three files diff --git a/experimental/report/testdata/multi-underline.yaml b/experimental/report/testdata/multi-underline.yaml new file mode 100644 index 00000000..ba562066 --- 
/dev/null +++ b/experimental/report/testdata/multi-underline.yaml @@ -0,0 +1,49 @@ +# Copyright 2020-2024 Buf Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# buf.compiler.v1.Report + +files: + - path: "foo.proto" + text: | + syntax = "proto4" + + package abc.xyz; + + message Blah { + required size_t x = 0; + } + +diagnostics: + - message: "`size_t` is not a built-in Protobuf type" + level: LEVEL_ERROR + annotations: + - file: 0 + start: 63 + end: 68 + - message: "syntax version specified here" + file: 0 + start: 9 + end: 17 + - message: "these are pretty bad names" + level: LEVEL_WARNING + annotations: + - message: "could be better" + file: 0 + start: 27 + end: 34 + - message: "blah to you too!!" + file: 0 + start: 45 + end: 49 \ No newline at end of file diff --git a/experimental/report/testdata/multi-underline.yaml.color.txt b/experimental/report/testdata/multi-underline.yaml.color.txt new file mode 100755 index 00000000..cfedf336 --- /dev/null +++ b/experimental/report/testdata/multi-underline.yaml.color.txt @@ -0,0 +1,21 @@ +⟨b.red⟩error: `size_t` is not a built-in Protobuf type⟨reset⟩ +⟨blu⟩ --> foo.proto:6:12 +⟨blu⟩ | +⟨blu⟩ 1 | ⟨reset⟩syntax = "proto4" +⟨blu⟩ | ⟨reset⟩ ⟨b.blu⟩--------⟨reset⟩ ⟨b.blu⟩syntax version specified here +⟨blu⟩ 2 | ⟨reset⟩ +⟨blu⟩... +⟨blu⟩ 6 | ⟨reset⟩ required size_t x = 0; +⟨blu⟩ | ⟨reset⟩ ⟨b.red⟩^^^^^⟨reset⟩ ⟨b.red⟩⟨reset⟩ + +⟨b.ylw⟩warning: these are pretty bad names⟨reset⟩ +⟨blu⟩ --> foo.proto:3:9 +⟨blu⟩ | +⟨blu⟩ 3 | ⟨reset⟩package abc.xyz; +⟨blu⟩ | ⟨reset⟩ ⟨b.ylw⟩^^^^^^^⟨reset⟩ ⟨b.ylw⟩could be better +⟨blu⟩ 4 | ⟨reset⟩ +⟨blu⟩ 5 | ⟨reset⟩message Blah { +⟨blu⟩ | ⟨reset⟩ ⟨b.blu⟩----⟨reset⟩ ⟨b.blu⟩blah to you too!!⟨reset⟩ + +⟨b.red⟩encountered 1 error and 1 warning⟨reset⟩ +⟨reset⟩ \ No newline at end of file diff --git a/experimental/report/testdata/multi-underline.yaml.fancy.txt b/experimental/report/testdata/multi-underline.yaml.fancy.txt new file mode 100755 index 00000000..182366b7 --- /dev/null +++ b/experimental/report/testdata/multi-underline.yaml.fancy.txt @@ -0,0 +1,20 @@ +error: `size_t` is not a built-in Protobuf type + --> foo.proto:6:12 + | + 1 | syntax = "proto4" + | -------- syntax version specified here + 2 | +... + 6 | required size_t x = 0; + | ^^^^^ + +warning: these are pretty bad names + --> foo.proto:3:9 + | + 3 | package abc.xyz; + | ^^^^^^^ could be better + 4 | + 5 | message Blah { + | ---- blah to you too!! 
+ +encountered 1 error and 1 warning diff --git a/experimental/report/testdata/multi-underline.yaml.simple.txt b/experimental/report/testdata/multi-underline.yaml.simple.txt new file mode 100755 index 00000000..5139ff8e --- /dev/null +++ b/experimental/report/testdata/multi-underline.yaml.simple.txt @@ -0,0 +1,2 @@ +error: foo.proto:6:12: `size_t` is not a built-in Protobuf type +warning: foo.proto:3:9: these are pretty bad names diff --git a/experimental/report/testdata/multiline.yaml b/experimental/report/testdata/multiline.yaml new file mode 100644 index 00000000..c908bcfb --- /dev/null +++ b/experimental/report/testdata/multiline.yaml @@ -0,0 +1,148 @@ +# Copyright 2020-2024 Buf Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# buf.compiler.v1.Report + +files: + - path: "foo.proto" + text: | + syntax = "proto4" + + package abc.xyz; + + message Blah { + required size_t x = 0; + message Bonk { + field + field + field + } + } + +diagnostics: + - message: whole block + level: LEVEL_WARNING + annotations: + - message: "this block" + file: 0 + start: 38 + end: 129 + + - message: nested blocks + level: LEVEL_WARNING + annotations: + - message: "this block" + file: 0 + start: 38 + end: 129 + - message: "and this block" + file: 0 + start: 80 + end: 127 + + - message: nested blocks same start + level: LEVEL_WARNING + annotations: + - message: "this block" + file: 0 + start: 38 + end: 129 + - message: "and this block" + file: 0 + start: 38 + end: 127 + + - message: nested blocks same end + level: LEVEL_WARNING + annotations: + - message: "this block" + file: 0 + start: 38 + end: 129 + - message: "and this block" + file: 0 + start: 80 + end: 129 + + - message: nested overlap + level: LEVEL_WARNING + annotations: + - message: "this block" + file: 0 + start: 38 + end: 127 + - message: "and this block" + file: 0 + start: 80 + end: 129 + + - message: nesting just the braces + level: LEVEL_WARNING + annotations: + - message: "this block" + file: 0 + start: 51 + end: 129 + - message: "and this block" + file: 0 + start: 93 + end: 127 + + - message: nesting just the braces same start + level: LEVEL_WARNING + annotations: + - message: "this block" + file: 0 + start: 51 + end: 129 + - message: "and this block" + file: 0 + start: 51 + end: 127 + + - message: nesting just the braces same start (2) + level: LEVEL_WARNING + annotations: + - message: "and this block" + file: 0 + start: 51 + end: 127 + - message: "this block" + file: 0 + start: 51 + end: 129 + + - message: braces nesting overlap + level: LEVEL_WARNING + annotations: + - message: "this block" + file: 0 + start: 51 + end: 127 + - message: "and this block" + file: 0 + start: 93 + end: 129 + + - message: braces nesting overlap (2) + level: LEVEL_WARNING + annotations: + - message: "and this block" + file: 0 + start: 93 + end: 129 + - message: "this block" + file: 0 + start: 51 + end: 127 \ No newline at end of file diff --git a/experimental/report/testdata/multiline.yaml.color.txt 
b/experimental/report/testdata/multiline.yaml.color.txt new file mode 100755 index 00000000..6b7f34a4 --- /dev/null +++ b/experimental/report/testdata/multiline.yaml.color.txt @@ -0,0 +1,121 @@ +⟨b.ylw⟩warning: whole block⟨reset⟩ +⟨blu⟩ --> foo.proto:5:2 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.ylw⟩/ ⟨reset⟩message Blah { +⟨blu⟩... ⟨b.ylw⟩| +⟨blu⟩12 | ⟨b.ylw⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.ylw⟩\_^ this block⟨reset⟩ + +⟨b.ylw⟩warning: nested blocks⟨reset⟩ +⟨blu⟩ --> foo.proto:5:2 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.ylw⟩/ ⟨reset⟩message Blah { +⟨blu⟩ 6 | ⟨b.ylw⟩| ⟨reset⟩ required size_t x = 0; +⟨blu⟩ 7 | ⟨b.ylw⟩| ⟨b.blu⟩/ ⟨reset⟩ message Bonk { +⟨blu⟩... ⟨b.ylw⟩| ⟨b.blu⟩| +⟨blu⟩11 | ⟨b.ylw⟩| ⟨b.blu⟩| ⟨reset⟩ } +⟨blu⟩ | ⟨b.ylw⟩| ⟨b.blu⟩\___- and this block +⟨blu⟩12 | ⟨b.ylw⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.ylw⟩\___^ this block⟨reset⟩ + +⟨b.ylw⟩warning: nested blocks same start⟨reset⟩ +⟨blu⟩ --> foo.proto:5:2 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.ylw⟩/ ⟨b.blu⟩/ ⟨reset⟩message Blah { +⟨blu⟩... ⟨b.ylw⟩| ⟨b.blu⟩| +⟨blu⟩11 | ⟨b.ylw⟩| ⟨b.blu⟩| ⟨reset⟩ } +⟨blu⟩ | ⟨b.ylw⟩| ⟨b.blu⟩\___- and this block +⟨blu⟩12 | ⟨b.ylw⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.ylw⟩\___^ this block⟨reset⟩ + +⟨b.ylw⟩warning: nested blocks same end⟨reset⟩ +⟨blu⟩ --> foo.proto:5:2 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.ylw⟩/ ⟨reset⟩message Blah { +⟨blu⟩ 6 | ⟨b.ylw⟩| ⟨reset⟩ required size_t x = 0; +⟨blu⟩ 7 | ⟨b.ylw⟩| ⟨b.blu⟩/ ⟨reset⟩ message Bonk { +⟨blu⟩... ⟨b.ylw⟩| ⟨b.blu⟩| +⟨blu⟩12 | ⟨b.ylw⟩| ⟨b.blu⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.ylw⟩\___^ this block +⟨blu⟩ | ⟨b.ylw⟩ ⟨b.blu⟩\_- and this block⟨reset⟩ + +⟨b.ylw⟩warning: nested overlap⟨reset⟩ +⟨blu⟩ --> foo.proto:5:2 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.ylw⟩/ ⟨reset⟩message Blah { +⟨blu⟩ 6 | ⟨b.ylw⟩| ⟨reset⟩ required size_t x = 0; +⟨blu⟩ 7 | ⟨b.ylw⟩| ⟨b.blu⟩/ ⟨reset⟩ message Bonk { +⟨blu⟩... ⟨b.ylw⟩| ⟨b.blu⟩| +⟨blu⟩11 | ⟨b.ylw⟩| ⟨b.blu⟩| ⟨reset⟩ } +⟨blu⟩ | ⟨b.ylw⟩\_____^ this block +⟨blu⟩12 | ⟨b.blu⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.blu⟩\_- and this block⟨reset⟩ + +⟨b.ylw⟩warning: nesting just the braces⟨reset⟩ +⟨blu⟩ --> foo.proto:5:15 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.ylw⟩ ⟨reset⟩message Blah { +⟨blu⟩ | ⟨b.ylw⟩ ________________^ +⟨blu⟩ 6 | ⟨b.ylw⟩/ ⟨reset⟩ required size_t x = 0; +⟨blu⟩ 7 | ⟨b.ylw⟩| ⟨b.blu⟩ ⟨reset⟩ message Bonk { +⟨blu⟩ | ⟨b.ylw⟩| ⟨b.blu⟩ ________________- +⟨blu⟩... ⟨b.ylw⟩| ⟨b.blu⟩| +⟨blu⟩11 | ⟨b.ylw⟩| ⟨b.blu⟩| ⟨reset⟩ } +⟨blu⟩ | ⟨b.ylw⟩| ⟨b.blu⟩\___- and this block +⟨blu⟩12 | ⟨b.ylw⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.ylw⟩\___^ this block⟨reset⟩ + +⟨b.ylw⟩warning: nesting just the braces same start⟨reset⟩ +⟨blu⟩ --> foo.proto:5:15 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.ylw⟩ ⟨b.blu⟩ ⟨reset⟩message Blah { +⟨blu⟩ | ⟨b.ylw⟩ ________________^ +⟨blu⟩ | ⟨b.ylw⟩/ ⟨b.blu⟩ ______________- +⟨blu⟩... ⟨b.ylw⟩| ⟨b.blu⟩/ +⟨blu⟩11 | ⟨b.ylw⟩| ⟨b.blu⟩| ⟨reset⟩ } +⟨blu⟩ | ⟨b.ylw⟩| ⟨b.blu⟩\___- and this block +⟨blu⟩12 | ⟨b.ylw⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.ylw⟩\___^ this block⟨reset⟩ + +⟨b.ylw⟩warning: nesting just the braces same start (2)⟨reset⟩ +⟨blu⟩ --> foo.proto:5:15 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.ylw⟩ ⟨b.blu⟩ ⟨reset⟩message Blah { +⟨blu⟩ | ⟨b.ylw⟩ ________________^ +⟨blu⟩ | ⟨b.ylw⟩/ ⟨b.blu⟩ ______________- +⟨blu⟩... ⟨b.ylw⟩| ⟨b.blu⟩/ +⟨blu⟩11 | ⟨b.ylw⟩| ⟨b.blu⟩| ⟨reset⟩ } +⟨blu⟩ | ⟨b.ylw⟩\_____^ and this block +⟨blu⟩12 | ⟨b.blu⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.blu⟩\_- this block⟨reset⟩ + +⟨b.ylw⟩warning: braces nesting overlap⟨reset⟩ +⟨blu⟩ --> foo.proto:5:15 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.ylw⟩ ⟨reset⟩message Blah { +⟨blu⟩ | ⟨b.ylw⟩ ________________^ +⟨blu⟩ 6 | ⟨b.ylw⟩/ ⟨reset⟩ required size_t x = 0; +⟨blu⟩ 7 | ⟨b.ylw⟩| ⟨b.blu⟩ ⟨reset⟩ message Bonk { +⟨blu⟩ | ⟨b.ylw⟩| ⟨b.blu⟩ ________________- +⟨blu⟩... 
⟨b.ylw⟩| ⟨b.blu⟩/ +⟨blu⟩11 | ⟨b.ylw⟩| ⟨b.blu⟩| ⟨reset⟩ } +⟨blu⟩ | ⟨b.ylw⟩\_____^ this block +⟨blu⟩12 | ⟨b.blu⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.blu⟩\_- and this block⟨reset⟩ + +⟨b.ylw⟩warning: braces nesting overlap (2)⟨reset⟩ +⟨blu⟩ --> foo.proto:7:17 +⟨blu⟩ | +⟨blu⟩ 5 | ⟨b.blu⟩ ⟨reset⟩message Blah { +⟨blu⟩ | ⟨b.blu⟩ ______________- +⟨blu⟩ 6 | ⟨b.blu⟩/ ⟨reset⟩ required size_t x = 0; +⟨blu⟩ 7 | ⟨b.ylw⟩ ⟨b.blu⟩| ⟨reset⟩ message Bonk { +⟨blu⟩ | ⟨b.ylw⟩ __________________^ +⟨blu⟩... ⟨b.ylw⟩/ ⟨b.blu⟩| +⟨blu⟩11 | ⟨b.ylw⟩| ⟨b.blu⟩| ⟨reset⟩ } +⟨blu⟩ | ⟨b.ylw⟩| ⟨b.blu⟩\___- this block +⟨blu⟩12 | ⟨b.ylw⟩| ⟨reset⟩} +⟨blu⟩ | ⟨b.ylw⟩\___^ and this block⟨reset⟩ + +⟨b.ylw⟩ encountered 10 warnings +⟨reset⟩ \ No newline at end of file diff --git a/experimental/report/testdata/multiline.yaml.fancy.txt b/experimental/report/testdata/multiline.yaml.fancy.txt new file mode 100755 index 00000000..ad654eb5 --- /dev/null +++ b/experimental/report/testdata/multiline.yaml.fancy.txt @@ -0,0 +1,120 @@ +warning: whole block + --> foo.proto:5:2 + | + 5 | / message Blah { +... | +12 | | } + | \_^ this block + +warning: nested blocks + --> foo.proto:5:2 + | + 5 | / message Blah { + 6 | | required size_t x = 0; + 7 | | / message Bonk { +... | | +11 | | | } + | | \___- and this block +12 | | } + | \___^ this block + +warning: nested blocks same start + --> foo.proto:5:2 + | + 5 | / / message Blah { +... | | +11 | | | } + | | \___- and this block +12 | | } + | \___^ this block + +warning: nested blocks same end + --> foo.proto:5:2 + | + 5 | / message Blah { + 6 | | required size_t x = 0; + 7 | | / message Bonk { +... | | +12 | | | } + | \___^ this block + | \_- and this block + +warning: nested overlap + --> foo.proto:5:2 + | + 5 | / message Blah { + 6 | | required size_t x = 0; + 7 | | / message Bonk { +... | | +11 | | | } + | \_____^ this block +12 | | } + | \_- and this block + +warning: nesting just the braces + --> foo.proto:5:15 + | + 5 | message Blah { + | ________________^ + 6 | / required size_t x = 0; + 7 | | message Bonk { + | | ________________- +... | | +11 | | | } + | | \___- and this block +12 | | } + | \___^ this block + +warning: nesting just the braces same start + --> foo.proto:5:15 + | + 5 | message Blah { + | ________________^ + | / ______________- +... | / +11 | | | } + | | \___- and this block +12 | | } + | \___^ this block + +warning: nesting just the braces same start (2) + --> foo.proto:5:15 + | + 5 | message Blah { + | ________________^ + | / ______________- +... | / +11 | | | } + | \_____^ and this block +12 | | } + | \_- this block + +warning: braces nesting overlap + --> foo.proto:5:15 + | + 5 | message Blah { + | ________________^ + 6 | / required size_t x = 0; + 7 | | message Bonk { + | | ________________- +... | / +11 | | | } + | \_____^ this block +12 | | } + | \_- and this block + +warning: braces nesting overlap (2) + --> foo.proto:7:17 + | + 5 | message Blah { + | ______________- + 6 | / required size_t x = 0; + 7 | | message Bonk { + | __________________^ +... 
/ | +11 | | | } + | | \___- this block +12 | | } + | \___^ and this block + + encountered 10 warnings diff --git a/experimental/report/testdata/multiline.yaml.simple.txt b/experimental/report/testdata/multiline.yaml.simple.txt new file mode 100755 index 00000000..956cf225 --- /dev/null +++ b/experimental/report/testdata/multiline.yaml.simple.txt @@ -0,0 +1,10 @@ +warning: foo.proto:5:2: whole block +warning: foo.proto:5:2: nested blocks +warning: foo.proto:5:2: nested blocks same start +warning: foo.proto:5:2: nested blocks same end +warning: foo.proto:5:2: nested overlap +warning: foo.proto:5:15: nesting just the braces +warning: foo.proto:5:15: nesting just the braces same start +warning: foo.proto:5:15: nesting just the braces same start (2) +warning: foo.proto:5:15: braces nesting overlap +warning: foo.proto:7:17: braces nesting overlap (2) diff --git a/experimental/report/testdata/no-snippets.yaml b/experimental/report/testdata/no-snippets.yaml new file mode 100644 index 00000000..62360465 --- /dev/null +++ b/experimental/report/testdata/no-snippets.yaml @@ -0,0 +1,30 @@ +# Copyright 2020-2024 Buf Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# buf.compiler.v1.Report + +diagnostics: + - message: "system not supported" + level: LEVEL_ERROR + + - message: 'could not open file "foo.proto": os error 2: no such file or directory' + level: LEVEL_ERROR + in_file: foo.proto + + - message: "file consists only of the byte `0xaa`" + level: LEVEL_WARNING + in_file: foo.proto + notes: ["that means that the file is screaming"] + help: ["you should delete it to put it out of its misery"] + debug: ["0xaaaaaaaaaaaaaaaa"] diff --git a/experimental/report/testdata/no-snippets.yaml.color.txt b/experimental/report/testdata/no-snippets.yaml.color.txt new file mode 100755 index 00000000..9d229624 --- /dev/null +++ b/experimental/report/testdata/no-snippets.yaml.color.txt @@ -0,0 +1,13 @@ +⟨b.red⟩error: system not supported⟨reset⟩⟨reset⟩ + +⟨b.red⟩error: could not open file "foo.proto": os error 2: no such file or directory⟨reset⟩ +⟨blu⟩ --> foo.proto⟨reset⟩ + +⟨b.ylw⟩warning: file consists only of the byte `0xaa`⟨reset⟩ +⟨blu⟩ --> foo.proto +⟨blu⟩ = ⟨b.cyn⟩note: ⟨reset⟩that means that the file is screaming +⟨blu⟩ = ⟨b.cyn⟩help: ⟨reset⟩you should delete it to put it out of its misery +⟨blu⟩ = ⟨b.red⟩debug: ⟨reset⟩0xaaaaaaaaaaaaaaaa⟨reset⟩ + +⟨b.red⟩encountered 2 errors and 1 warning⟨reset⟩ +⟨reset⟩ \ No newline at end of file diff --git a/experimental/report/testdata/no-snippets.yaml.fancy.txt b/experimental/report/testdata/no-snippets.yaml.fancy.txt new file mode 100755 index 00000000..d5640c18 --- /dev/null +++ b/experimental/report/testdata/no-snippets.yaml.fancy.txt @@ -0,0 +1,12 @@ +error: system not supported + +error: could not open file "foo.proto": os error 2: no such file or directory + --> foo.proto + +warning: file consists only of the byte `0xaa` + --> foo.proto + = note: that means that the file is screaming + = help: you should delete it to put it out of its misery + = 
debug: 0xaaaaaaaaaaaaaaaa + +encountered 2 errors and 1 warning diff --git a/experimental/report/testdata/no-snippets.yaml.simple.txt b/experimental/report/testdata/no-snippets.yaml.simple.txt new file mode 100755 index 00000000..fb79beac --- /dev/null +++ b/experimental/report/testdata/no-snippets.yaml.simple.txt @@ -0,0 +1,3 @@ +error: system not supported +error: foo.proto: could not open file "foo.proto": os error 2: no such file or directory +warning: foo.proto: file consists only of the byte `0xaa` diff --git a/experimental/report/testdata/single-line.yaml b/experimental/report/testdata/single-line.yaml new file mode 100644 index 00000000..f8e9eacb --- /dev/null +++ b/experimental/report/testdata/single-line.yaml @@ -0,0 +1,107 @@ +# Copyright 2020-2024 Buf Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# buf.compiler.v1.Report + +files: + - path: "foo.proto" + text: | + syntax = "proto4" + + package abc.xyz; + + message Blah { + required size_t x = 0; + } + +diagnostics: + - message: '"proto4" isn''t real, it can''t hurt you' + level: LEVEL_REMARK + annotations: + - message: 'help: change this to "proto5"' + file: 0 + start: 9 + end: 17 + + - message: package + level: LEVEL_ERROR + annotations: + - message: package + file: 0 + start: 19 + end: 26 + - message: semicolon + file: 0 + start: 34 + end: 35 + + - message: this is an overlapping error + level: LEVEL_ERROR + annotations: + - message: package + file: 0 + start: 19 + end: 26 + - message: package decl + file: 0 + start: 19 + end: 35 + + - message: P A C K A G E + level: LEVEL_ERROR + annotations: + - message: "help: p" + file: 0 + start: 19 + end: 20 + - message: "help: ck" + file: 0 + start: 21 + end: 23 + - message: "help: ge" + file: 0 + start: 24 + end: 26 + + - message: P A C K A G E (different order) + level: LEVEL_ERROR + annotations: + - message: "help: ck" + file: 0 + start: 21 + end: 23 + - message: "help: p" + file: 0 + start: 19 + end: 20 + - message: "help: ge" + file: 0 + start: 24 + end: 26 + + - message: P A C K A G E (single letters) + level: LEVEL_ERROR + annotations: + - message: "p" + file: 0 + end: 20 + start: 19 + - message: "k" + file: 0 + start: 21 + end: 23 + - message: "g" + file: 0 + start: 24 + end: 26 diff --git a/experimental/report/testdata/single-line.yaml.color.txt b/experimental/report/testdata/single-line.yaml.color.txt new file mode 100755 index 00000000..12d9caf1 --- /dev/null +++ b/experimental/report/testdata/single-line.yaml.color.txt @@ -0,0 +1,54 @@ +⟨b.cyn⟩remark: "proto4" isn't real, it can't hurt you⟨reset⟩ +⟨blu⟩ --> foo.proto:1:10 +⟨blu⟩ | +⟨blu⟩ 1 | ⟨reset⟩syntax = "proto4" +⟨blu⟩ | ⟨reset⟩ ⟨b.cyn⟩^^^^^^^^⟨reset⟩ ⟨b.cyn⟩help: change this to "proto5"⟨reset⟩ + +⟨b.red⟩error: package⟨reset⟩ +⟨blu⟩ --> foo.proto:3:1 +⟨blu⟩ | +⟨blu⟩ 3 | ⟨reset⟩package abc.xyz; +⟨blu⟩ | ⟨b.red⟩^^^^^^^⟨reset⟩ ⟨b.blu⟩-⟨reset⟩ ⟨b.blu⟩semicolon +⟨blu⟩ | ⟨b.red⟩| +⟨blu⟩ | ⟨b.red⟩package⟨reset⟩ + +⟨b.red⟩error: this is an overlapping error⟨reset⟩ +⟨blu⟩ --> foo.proto:3:1 +⟨blu⟩ | +⟨blu⟩ 3 | 
⟨reset⟩package abc.xyz; +⟨blu⟩ | ⟨b.red⟩^^^^^^^⟨b.blu⟩---------⟨reset⟩ ⟨b.blu⟩package decl +⟨blu⟩ | ⟨b.red⟩| +⟨blu⟩ | ⟨b.red⟩package⟨reset⟩ + +⟨b.red⟩error: P A C K A G E⟨reset⟩ +⟨blu⟩ --> foo.proto:3:1 +⟨blu⟩ | +⟨blu⟩ 3 | ⟨reset⟩package abc.xyz; +⟨blu⟩ | ⟨b.red⟩^⟨reset⟩ ⟨b.blu⟩--⟨reset⟩ ⟨b.blu⟩--⟨reset⟩ ⟨b.blu⟩help: ge +⟨blu⟩ | ⟨b.red⟩| ⟨b.blu⟩| +⟨blu⟩ | ⟨b.red⟩help: p⟨b.blu⟩| +⟨blu⟩ | ⟨b.blu⟩| +⟨blu⟩ | ⟨b.blu⟩help: ck⟨reset⟩ + +⟨b.red⟩error: P A C K A G E (different order)⟨reset⟩ +⟨blu⟩ --> foo.proto:3:3 +⟨blu⟩ | +⟨blu⟩ 3 | ⟨reset⟩package abc.xyz; +⟨blu⟩ | ⟨b.blu⟩-⟨reset⟩ ⟨b.red⟩^^⟨reset⟩ ⟨b.blu⟩--⟨reset⟩ ⟨b.blu⟩help: ge +⟨blu⟩ | ⟨b.blu⟩| ⟨b.red⟩| +⟨blu⟩ | ⟨b.blu⟩| ⟨b.red⟩help: ck +⟨blu⟩ | ⟨b.blu⟩| +⟨blu⟩ | ⟨b.blu⟩help: p⟨reset⟩ + +⟨b.red⟩error: P A C K A G E (single letters)⟨reset⟩ +⟨blu⟩ --> foo.proto:3:1 +⟨blu⟩ | +⟨blu⟩ 3 | ⟨reset⟩package abc.xyz; +⟨blu⟩ | ⟨b.red⟩^⟨reset⟩ ⟨b.blu⟩--⟨reset⟩ ⟨b.blu⟩--⟨reset⟩ ⟨b.blu⟩g +⟨blu⟩ | ⟨b.red⟩| ⟨b.blu⟩| +⟨blu⟩ | ⟨b.red⟩p ⟨b.blu⟩| +⟨blu⟩ | ⟨b.blu⟩| +⟨blu⟩ | ⟨b.blu⟩k⟨reset⟩ + +⟨b.red⟩encountered 5 errors⟨reset⟩ +⟨reset⟩ \ No newline at end of file diff --git a/experimental/report/testdata/single-line.yaml.fancy.txt b/experimental/report/testdata/single-line.yaml.fancy.txt new file mode 100755 index 00000000..792df92b --- /dev/null +++ b/experimental/report/testdata/single-line.yaml.fancy.txt @@ -0,0 +1,53 @@ +remark: "proto4" isn't real, it can't hurt you + --> foo.proto:1:10 + | + 1 | syntax = "proto4" + | ^^^^^^^^ help: change this to "proto5" + +error: package + --> foo.proto:3:1 + | + 3 | package abc.xyz; + | ^^^^^^^ - semicolon + | | + | package + +error: this is an overlapping error + --> foo.proto:3:1 + | + 3 | package abc.xyz; + | ^^^^^^^--------- package decl + | | + | package + +error: P A C K A G E + --> foo.proto:3:1 + | + 3 | package abc.xyz; + | ^ -- -- help: ge + | | | + | help: p + | | + | help: ck + +error: P A C K A G E (different order) + --> foo.proto:3:3 + | + 3 | package abc.xyz; + | - ^^ -- help: ge + | | | + | | help: ck + | | + | help: p + +error: P A C K A G E (single letters) + --> foo.proto:3:1 + | + 3 | package abc.xyz; + | ^ -- -- g + | | | + | p | + | | + | k + +encountered 5 errors diff --git a/experimental/report/testdata/single-line.yaml.simple.txt b/experimental/report/testdata/single-line.yaml.simple.txt new file mode 100755 index 00000000..202efdac --- /dev/null +++ b/experimental/report/testdata/single-line.yaml.simple.txt @@ -0,0 +1,6 @@ +remark: foo.proto:1:10: "proto4" isn't real, it can't hurt you +error: foo.proto:3:1: package +error: foo.proto:3:1: this is an overlapping error +error: foo.proto:3:1: P A C K A G E +error: foo.proto:3:3: P A C K A G E (different order) +error: foo.proto:3:1: P A C K A G E (single letters) diff --git a/experimental/report/testdata/tabstops.yaml b/experimental/report/testdata/tabstops.yaml new file mode 100644 index 00000000..9086b655 --- /dev/null +++ b/experimental/report/testdata/tabstops.yaml @@ -0,0 +1,43 @@ +# Copyright 2020-2024 Buf Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# buf.compiler.v1.Report + +files: + - path: "foo.proto" + text: "syntax = \"proto4\"\npackage abc.xyz;\nmessage Blah {\n\trequired size_t x = 0;\n\tmessage Bonk {\n\t\tfield\n \tfield\n field}\n\t}\n" + +diagnostics: + - message: tabstop + level: LEVEL_WARNING + annotations: + - message: "this is in front of some tabstops" + file: 0 + start: 92 + end: 97 + - message: "specifically these" + file: 0 + start: 90 + end: 92 + - message: partial tabstop + level: LEVEL_WARNING + annotations: + - message: "tabstop" + file: 0 + start: 99 + end: 100 + - message: "spaces" + file: 0 + start: 98 + end: 99 \ No newline at end of file diff --git a/experimental/report/testdata/tabstops.yaml.color.txt b/experimental/report/testdata/tabstops.yaml.color.txt new file mode 100755 index 00000000..1e7bd155 --- /dev/null +++ b/experimental/report/testdata/tabstops.yaml.color.txt @@ -0,0 +1,18 @@ +⟨b.ylw⟩warning: tabstop⟨reset⟩ +⟨blu⟩ --> foo.proto:6:9 +⟨blu⟩ | +⟨blu⟩ 6 | ⟨reset⟩ field +⟨blu⟩ | ⟨b.blu⟩--------⟨b.ylw⟩^^^^^⟨reset⟩ ⟨b.ylw⟩this is in front of some tabstops +⟨blu⟩ | ⟨b.blu⟩| +⟨blu⟩ | ⟨b.blu⟩specifically these⟨reset⟩ + +⟨b.ylw⟩warning: partial tabstop⟨reset⟩ +⟨blu⟩ --> foo.proto:7:2 +⟨blu⟩ | +⟨blu⟩ 7 | ⟨reset⟩ field +⟨blu⟩ | ⟨b.blu⟩-⟨b.ylw⟩^^^⟨reset⟩ ⟨b.ylw⟩tabstop +⟨blu⟩ | ⟨b.blu⟩| +⟨blu⟩ | ⟨b.blu⟩spaces⟨reset⟩ + +⟨b.ylw⟩ encountered 2 warnings +⟨reset⟩ \ No newline at end of file diff --git a/experimental/report/testdata/tabstops.yaml.fancy.txt b/experimental/report/testdata/tabstops.yaml.fancy.txt new file mode 100755 index 00000000..954bf3bf --- /dev/null +++ b/experimental/report/testdata/tabstops.yaml.fancy.txt @@ -0,0 +1,17 @@ +warning: tabstop + --> foo.proto:6:9 + | + 6 | field + | --------^^^^^ this is in front of some tabstops + | | + | specifically these + +warning: partial tabstop + --> foo.proto:7:2 + | + 7 | field + | -^^^ tabstop + | | + | spaces + + encountered 2 warnings diff --git a/experimental/report/testdata/tabstops.yaml.simple.txt b/experimental/report/testdata/tabstops.yaml.simple.txt new file mode 100755 index 00000000..5b44f581 --- /dev/null +++ b/experimental/report/testdata/tabstops.yaml.simple.txt @@ -0,0 +1,2 @@ +warning: foo.proto:6:9: tabstop +warning: foo.proto:7:2: partial tabstop diff --git a/go.mod b/go.mod index e22ff881..cb761653 100644 --- a/go.mod +++ b/go.mod @@ -3,16 +3,17 @@ module github.com/bufbuild/protocompile go 1.21 require ( + github.com/bmatcuk/doublestar/v4 v4.6.1 github.com/google/go-cmp v0.6.0 + github.com/pmezard/go-difflib v1.0.0 + github.com/rivo/uniseg v0.4.7 github.com/stretchr/testify v1.9.0 + github.com/tidwall/pretty v1.2.1 golang.org/x/sync v0.8.0 google.golang.org/protobuf v1.34.2 + gopkg.in/yaml.v3 v3.0.1 ) -require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect -) +require github.com/davecgh/go-spew v1.1.1 // indirect retract v0.5.0 // Contains deadlock error diff --git a/go.sum b/go.sum index 6320a110..87364e3f 100644 --- a/go.sum +++ b/go.sum @@ -1,11 +1,17 @@ +github.com/bmatcuk/doublestar/v4 v4.6.1 h1:FH9SifrbvJhnlQpztAx++wlkk70QBf0iBWDwNy7PA4I= +github.com/bmatcuk/doublestar/v4 v4.6.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/google/go-cmp 
v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= diff --git a/go.work b/go.work index ba2d9c0d..e737313e 100644 --- a/go.work +++ b/go.work @@ -1,4 +1,4 @@ -go 1.21 +go 1.23 use ( . diff --git a/go.work.sum b/go.work.sum index d977cf05..bdffaec7 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1,3 +1,5 @@ +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.34.2-20240717164558-a6c49f84cc0f.2 h1:SZRVx928rbYZ6hEKUIN+vtGDkl7uotABRWGY4OAg5gM= +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.34.2-20240717164558-a6c49f84cc0f.2/go.mod h1:ylS4c28ACSI59oJrOdW4pHS4n0Hw4TgSPHn8rpHl4Yw= cloud.google.com/go v0.26.0 h1:e0WKqKTd5BnrG8aKH3J3h+QvEIQtSUcf2n5UZ5ZgLtQ= cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= @@ -21,6 +23,8 @@ github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAE github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/bufbuild/protocompile v0.2.1-0.20230123224550-da57cd758c2f/go.mod h1:tleDrpPTlLUVmgnEoN6qBliKWqJaZFJXqZdFjTd+ocU= github.com/bufbuild/protocompile v0.13.0/go.mod h1:dr++fGGeMPWHv7jPeT06ZKukm45NJscd7rUxQVzEKRk= +github.com/bufbuild/protovalidate-go v0.6.3 h1:wxQyzW035zM16Binbaz/nWAzS12dRIXhZdSUWRY7Fv0= +github.com/bufbuild/protovalidate-go v0.6.3/go.mod h1:J4PtwP9Z2YAGgB0+o+tTWEDtLtXvz/gfhFZD8pbzM/U= github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= @@ -37,6 +41,7 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.m github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0 h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A= +github.com/envoyproxy/protoc-gen-validate v1.1.0/go.mod h1:sXRDRVmzEbkM7CVcM06s9shE/m23dg3wzjl0UWqJ2q4= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/mock v1.1.1 
h1:G5FRp8JnTd7RQH5kemVNlMeyXQAztQ3mOWV95KxsXH8= @@ -65,27 +70,29 @@ github.com/jhump/goprotoc v0.5.0 h1:Y1UgUX+txUznfqcGdDef8ZOVlyQvnV0pKWZH08RmZuo= github.com/jhump/protoreflect v1.15.0 h1:U5T5/2LF0AZQFP9T4W5GfBjBaTruomrKobiR4E+oA/Q= github.com/jhump/protoreflect v1.15.0/go.mod h1:qww51KYjD2hoCl/ohxw5cK2LSssFczrbO1t8Ld2TENs= github.com/kr/pty v1.1.1 h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 h1:gQz4mCbXsO+nc9n1hCxHcGA3Zx3Eo+UHZoInFGUIXNM= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/yuin/goldmark v1.7.4/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= +golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= +golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3 h1:XQyxROzUlZH+WIQwySDgnISgOivlhjIEwaQaJEJrrN0= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57 h1:LQmS1nU0twXLA96Kt7U9qtHJEbBk3z6Q0V4UXjZkpr4= golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= @@ -98,6 +105,8 @@ golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns= +golang.org/x/net 
v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo= +golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be h1:vEDujvNQGv4jgYKudGeI/+DAX4Jffq6hpD55MmoEvKs= golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= @@ -109,6 +118,7 @@ golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/sync v0.0.0-20220513210516-0976fa681c29/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -129,24 +139,33 @@ golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= +golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk= +golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= +golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM= +golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= +golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod 
h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.8-0.20211029000441-d6a9af8af023 h1:0c3L82FDQ5rt1bjTBlchS8t6RQ6299/+5bWMnRLh+uI= golang.org/x/tools v0.1.8-0.20211029000441-d6a9af8af023/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.0/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -211,6 +230,9 @@ google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda h1:b6F6WIV4xHHD0FA4oIyzU6mHWg2WI2X1RBehwa5QN38= +google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda/go.mod h1:AHcE/gZH76Bk/ROZhQphlRoWo5xKDEtz3eVEO1LfA8c= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= @@ -224,6 +246,7 @@ google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ5 google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.67.0/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= @@ -231,5 +254,4 @@ google.golang.org/protobuf v1.28.2-0.20220831092852-f930b1dc76e8/go.mod h1:HV8QO google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs= diff --git a/internal/arena/arena.go b/internal/arena/arena.go new file mode 100644 index 00000000..67ab17e7 --- /dev/null +++ b/internal/arena/arena.go @@ -0,0 +1,192 @@ +// Copyright 2020-2024 Buf Technologies, Inc. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package arena defines an Arena type with compressed pointers.
+package arena
+
+import (
+	"fmt"
+	"math/bits"
+	"strings"
+)
+
+// pointersMinLenShift is the log2 of the size of the smallest slice in
+// a pointers[T].
+const (
+	pointersMinLenShift = 4
+	pointersMinLen      = 1 << pointersMinLenShift
+)
+
+// An untyped arena pointer.
+//
+// The pointer value of a particular pointer in an arena is equal to one
+// plus the number of elements allocated before it.
+type Untyped uint32
+
+// Nil returns a nil arena pointer.
+func Nil() Untyped {
+	return 0
+}
+
+// Nil returns whether this pointer is nil.
+func (p Untyped) Nil() bool {
+	return p == 0
+}
+
+// A compressed arena pointer.
+//
+// Cannot be dereferenced directly; see [Pointer.In].
+//
+// The zero value is nil.
+type Pointer[T any] Untyped
+
+// Nil returns whether this pointer is nil.
+func (p Pointer[T]) Nil() bool {
+	return Untyped(p).Nil()
+}
+
+// Looks up this pointer in the given arena.
+//
+// arena must be the arena that allocated this pointer, otherwise this will
+// either return an arbitrary pointer or panic. If p is nil, this panics.
+func (p Pointer[T]) In(arena *Arena[T]) *T {
+	return arena.At(Untyped(p))
+}
+
+// Arena is an arena that offers compressed pointers. Internally, it is a slice
+// of T that guarantees the Ts will never be moved.
+//
+// It does this by maintaining a table of logarithmically-growing slices that
+// mimic the resizing behavior of an ordinary slice. This trades off the linear
+// 8-byte overhead of []*T for a logarithmic 24-byte overhead. Lookup time
+// remains O(1), at the cost of two pointer loads instead of one.
+//
+// A zero Arena[T] is empty and ready to use.
+type Arena[T any] struct {
+	// Invariants:
+	// 1. cap(table[0]) == 1<= a.len() || idx < 0 {
+		panic(fmt.Sprintf("arena: pointer out of range: %#x", idx))
+	}
+
+	// Given pointersMinLenShift == n, the cumulative starting index of each slice is
+	//
+	//   0b0 << n, 0b1 << n, 0b11 << n, 0b111 << n
+	//
+	// Thus, to find which slice an index corresponds to, we add 0b1 << n (pointersMinLen).
+	// Because << distributes over addition, we get
+	//
+	//   0b1 << n, 0b10 << n, 0b100 << n, 0b1000 << n
+	//
+	// Taking the one-indexed high order bit, which maps this sequence to
+	//
+	//   1+n, 2+n, 3+n, 4+n
+	//
+	// We can subtract off n+1 to obtain the actual slice index:
+	//
+	//   0, 1, 2, 3
+
+	slice := bits.UintSize - bits.LeadingZeros(uint(idx)+pointersMinLen)
+	slice -= pointersMinLenShift + 1
+
+	// Then, the offset within table[slice] is given by subtracting off the
+	// length of all prior slices from idx.
+	idx -= a.lenOfFirstNSlices(slice)
+
+	return slice, idx
+}
diff --git a/internal/arena/arena_test.go b/internal/arena/arena_test.go
new file mode 100644
index 00000000..d9411761
--- /dev/null
+++ b/internal/arena/arena_test.go
@@ -0,0 +1,48 @@
+// Copyright 2020-2024 Buf Technologies, Inc.
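(Illustrative aside, not part of the change above: the slice/offset arithmetic described in the arena.go comments, worked with concrete numbers. With pointersMinLenShift = 4, slice k holds 1<<(k+4) elements, so slices begin at flat indices 0, 16, 48, 112, and so on; the closed form for the prior-slice lengths below follows from that and stands in for the lenOfFirstNSlices helper, whose body is not shown in this chunk.)

package main

import (
	"fmt"
	"math/bits"
)

const (
	minLenShift = 4                // mirrors pointersMinLenShift
	minLen      = 1 << minLenShift // mirrors pointersMinLen
)

// split maps a flat arena index to (slice, offset within that slice) using
// the same high-bit trick as the arena code: adding minLen turns the
// cumulative slice starts 0, 16, 48, 112, ... into powers of two.
func split(idx int) (slice, offset int) {
	slice = bits.UintSize - bits.LeadingZeros(uint(idx)+minLen)
	slice -= minLenShift + 1
	// The first `slice` slices hold 16 + 32 + ... = (1<<slice - 1) * 16 elements.
	offset = idx - ((1<<slice)-1)*minLen
	return slice, offset
}

func main() {
	for _, idx := range []int{0, 15, 16, 47, 48, 112} {
		s, off := split(idx)
		fmt.Printf("idx %3d -> slice %d, offset %2d\n", idx, s, off)
	}
	// idx   0 -> slice 0, offset  0
	// idx  15 -> slice 0, offset 15
	// idx  16 -> slice 1, offset  0
	// idx  47 -> slice 1, offset 31
	// idx  48 -> slice 2, offset  0
	// idx 112 -> slice 3, offset  0
}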
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package arena_test + +import ( + "testing" + + "github.com/bufbuild/protocompile/internal/arena" + "github.com/stretchr/testify/assert" +) + +func TestPointers(t *testing.T) { + assert := assert.New(t) + + var a arena.Arena[int] + + p1 := a.New(5) + p2 := p1.In(&a) + assert.Equal(5, *p1.In(&a)) + + for i := 0; i < 16; i++ { + a.New(i + 5) + } + assert.Equal(19, *arena.Pointer[int](16).In(&a)) + assert.Equal(20, *arena.Pointer[int](17).In(&a)) + assert.True(p1.In(&a) == p2) + + for i := 0; i < 32; i++ { + a.New(i + 21) + } + assert.Equal(51, *arena.Pointer[int](48).In(&a)) + assert.Equal(52, *arena.Pointer[int](49).In(&a)) + assert.True(p1.In(&a) == p2) + + assert.Equal("[5 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19|20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51|52]", a.String()) +} diff --git a/internal/gen/buf/compiler/v1/ast.pb.go b/internal/gen/buf/compiler/v1/ast.pb.go new file mode 100644 index 00000000..c9afc858 --- /dev/null +++ b/internal/gen/buf/compiler/v1/ast.pb.go @@ -0,0 +1,3446 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.34.2 +// protoc (unknown) +// source: buf/compiler/v1/ast.proto + +package compilerv1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Path_Component_Separator int32 + +const ( + Path_Component_SEPARATOR_UNSPECIFIED Path_Component_Separator = 0 + Path_Component_SEPARATOR_DOT Path_Component_Separator = 1 + Path_Component_SEPARATOR_SLASH Path_Component_Separator = 2 +) + +// Enum value maps for Path_Component_Separator. 
+var ( + Path_Component_Separator_name = map[int32]string{ + 0: "SEPARATOR_UNSPECIFIED", + 1: "SEPARATOR_DOT", + 2: "SEPARATOR_SLASH", + } + Path_Component_Separator_value = map[string]int32{ + "SEPARATOR_UNSPECIFIED": 0, + "SEPARATOR_DOT": 1, + "SEPARATOR_SLASH": 2, + } +) + +func (x Path_Component_Separator) Enum() *Path_Component_Separator { + p := new(Path_Component_Separator) + *p = x + return p +} + +func (x Path_Component_Separator) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Path_Component_Separator) Descriptor() protoreflect.EnumDescriptor { + return file_buf_compiler_v1_ast_proto_enumTypes[0].Descriptor() +} + +func (Path_Component_Separator) Type() protoreflect.EnumType { + return &file_buf_compiler_v1_ast_proto_enumTypes[0] +} + +func (x Path_Component_Separator) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Path_Component_Separator.Descriptor instead. +func (Path_Component_Separator) EnumDescriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{2, 0, 0} +} + +type Decl_Syntax_Kind int32 + +const ( + Decl_Syntax_KIND_UNSPECIFIED Decl_Syntax_Kind = 0 + Decl_Syntax_KIND_SYNTAX Decl_Syntax_Kind = 1 + Decl_Syntax_KIND_EDITION Decl_Syntax_Kind = 2 +) + +// Enum value maps for Decl_Syntax_Kind. +var ( + Decl_Syntax_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 1: "KIND_SYNTAX", + 2: "KIND_EDITION", + } + Decl_Syntax_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "KIND_SYNTAX": 1, + "KIND_EDITION": 2, + } +) + +func (x Decl_Syntax_Kind) Enum() *Decl_Syntax_Kind { + p := new(Decl_Syntax_Kind) + *p = x + return p +} + +func (x Decl_Syntax_Kind) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Decl_Syntax_Kind) Descriptor() protoreflect.EnumDescriptor { + return file_buf_compiler_v1_ast_proto_enumTypes[1].Descriptor() +} + +func (Decl_Syntax_Kind) Type() protoreflect.EnumType { + return &file_buf_compiler_v1_ast_proto_enumTypes[1] +} + +func (x Decl_Syntax_Kind) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Decl_Syntax_Kind.Descriptor instead. +func (Decl_Syntax_Kind) EnumDescriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3, 1, 0} +} + +type Decl_Import_Modifier int32 + +const ( + Decl_Import_MODIFIER_UNSPECIFIED Decl_Import_Modifier = 0 + Decl_Import_MODIFIER_WEAK Decl_Import_Modifier = 1 + Decl_Import_MODIFIER_PUBLIC Decl_Import_Modifier = 2 +) + +// Enum value maps for Decl_Import_Modifier. 
+var ( + Decl_Import_Modifier_name = map[int32]string{ + 0: "MODIFIER_UNSPECIFIED", + 1: "MODIFIER_WEAK", + 2: "MODIFIER_PUBLIC", + } + Decl_Import_Modifier_value = map[string]int32{ + "MODIFIER_UNSPECIFIED": 0, + "MODIFIER_WEAK": 1, + "MODIFIER_PUBLIC": 2, + } +) + +func (x Decl_Import_Modifier) Enum() *Decl_Import_Modifier { + p := new(Decl_Import_Modifier) + *p = x + return p +} + +func (x Decl_Import_Modifier) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Decl_Import_Modifier) Descriptor() protoreflect.EnumDescriptor { + return file_buf_compiler_v1_ast_proto_enumTypes[2].Descriptor() +} + +func (Decl_Import_Modifier) Type() protoreflect.EnumType { + return &file_buf_compiler_v1_ast_proto_enumTypes[2] +} + +func (x Decl_Import_Modifier) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Decl_Import_Modifier.Descriptor instead. +func (Decl_Import_Modifier) EnumDescriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3, 3, 0} +} + +type Decl_Range_Kind int32 + +const ( + Decl_Range_KIND_UNSPECIFIED Decl_Range_Kind = 0 + Decl_Range_KIND_EXTENSIONS Decl_Range_Kind = 1 + Decl_Range_KIND_RESERVED Decl_Range_Kind = 2 +) + +// Enum value maps for Decl_Range_Kind. +var ( + Decl_Range_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 1: "KIND_EXTENSIONS", + 2: "KIND_RESERVED", + } + Decl_Range_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "KIND_EXTENSIONS": 1, + "KIND_RESERVED": 2, + } +) + +func (x Decl_Range_Kind) Enum() *Decl_Range_Kind { + p := new(Decl_Range_Kind) + *p = x + return p +} + +func (x Decl_Range_Kind) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Decl_Range_Kind) Descriptor() protoreflect.EnumDescriptor { + return file_buf_compiler_v1_ast_proto_enumTypes[3].Descriptor() +} + +func (Decl_Range_Kind) Type() protoreflect.EnumType { + return &file_buf_compiler_v1_ast_proto_enumTypes[3] +} + +func (x Decl_Range_Kind) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Decl_Range_Kind.Descriptor instead. +func (Decl_Range_Kind) EnumDescriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3, 5, 0} +} + +type Def_Kind int32 + +const ( + Def_KIND_UNSPECIFIED Def_Kind = 0 + Def_KIND_MESSAGE Def_Kind = 1 + Def_KIND_ENUM Def_Kind = 2 + Def_KIND_SERVICE Def_Kind = 3 + Def_KIND_EXTEND Def_Kind = 4 + Def_KIND_FIELD Def_Kind = 5 + Def_KIND_ENUM_VALUE Def_Kind = 6 + Def_KIND_ONEOF Def_Kind = 7 + Def_KIND_GROUP Def_Kind = 8 + Def_KIND_METHOD Def_Kind = 9 + Def_KIND_OPTION Def_Kind = 10 +) + +// Enum value maps for Def_Kind. 
+var ( + Def_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 1: "KIND_MESSAGE", + 2: "KIND_ENUM", + 3: "KIND_SERVICE", + 4: "KIND_EXTEND", + 5: "KIND_FIELD", + 6: "KIND_ENUM_VALUE", + 7: "KIND_ONEOF", + 8: "KIND_GROUP", + 9: "KIND_METHOD", + 10: "KIND_OPTION", + } + Def_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "KIND_MESSAGE": 1, + "KIND_ENUM": 2, + "KIND_SERVICE": 3, + "KIND_EXTEND": 4, + "KIND_FIELD": 5, + "KIND_ENUM_VALUE": 6, + "KIND_ONEOF": 7, + "KIND_GROUP": 8, + "KIND_METHOD": 9, + "KIND_OPTION": 10, + } +) + +func (x Def_Kind) Enum() *Def_Kind { + p := new(Def_Kind) + *p = x + return p +} + +func (x Def_Kind) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Def_Kind) Descriptor() protoreflect.EnumDescriptor { + return file_buf_compiler_v1_ast_proto_enumTypes[4].Descriptor() +} + +func (Def_Kind) Type() protoreflect.EnumType { + return &file_buf_compiler_v1_ast_proto_enumTypes[4] +} + +func (x Def_Kind) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Def_Kind.Descriptor instead. +func (Def_Kind) EnumDescriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{4, 0} +} + +type Expr_Prefixed_Prefix int32 + +const ( + Expr_Prefixed_PREFIX_UNSPECIFIED Expr_Prefixed_Prefix = 0 + Expr_Prefixed_PREFIX_MINUS Expr_Prefixed_Prefix = 1 +) + +// Enum value maps for Expr_Prefixed_Prefix. +var ( + Expr_Prefixed_Prefix_name = map[int32]string{ + 0: "PREFIX_UNSPECIFIED", + 1: "PREFIX_MINUS", + } + Expr_Prefixed_Prefix_value = map[string]int32{ + "PREFIX_UNSPECIFIED": 0, + "PREFIX_MINUS": 1, + } +) + +func (x Expr_Prefixed_Prefix) Enum() *Expr_Prefixed_Prefix { + p := new(Expr_Prefixed_Prefix) + *p = x + return p +} + +func (x Expr_Prefixed_Prefix) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Expr_Prefixed_Prefix) Descriptor() protoreflect.EnumDescriptor { + return file_buf_compiler_v1_ast_proto_enumTypes[5].Descriptor() +} + +func (Expr_Prefixed_Prefix) Type() protoreflect.EnumType { + return &file_buf_compiler_v1_ast_proto_enumTypes[5] +} + +func (x Expr_Prefixed_Prefix) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Expr_Prefixed_Prefix.Descriptor instead. +func (Expr_Prefixed_Prefix) EnumDescriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{6, 1, 0} +} + +type Type_Prefixed_Prefix int32 + +const ( + Type_Prefixed_PREFIX_UNSPECIFIED Type_Prefixed_Prefix = 0 + Type_Prefixed_PREFIX_OPTIONAL Type_Prefixed_Prefix = 1 + Type_Prefixed_PREFIX_REPEATED Type_Prefixed_Prefix = 2 + Type_Prefixed_PREFIX_REQUIRED Type_Prefixed_Prefix = 3 + Type_Prefixed_PREFIX_STREAM Type_Prefixed_Prefix = 4 +) + +// Enum value maps for Type_Prefixed_Prefix. 
+var ( + Type_Prefixed_Prefix_name = map[int32]string{ + 0: "PREFIX_UNSPECIFIED", + 1: "PREFIX_OPTIONAL", + 2: "PREFIX_REPEATED", + 3: "PREFIX_REQUIRED", + 4: "PREFIX_STREAM", + } + Type_Prefixed_Prefix_value = map[string]int32{ + "PREFIX_UNSPECIFIED": 0, + "PREFIX_OPTIONAL": 1, + "PREFIX_REPEATED": 2, + "PREFIX_REQUIRED": 3, + "PREFIX_STREAM": 4, + } +) + +func (x Type_Prefixed_Prefix) Enum() *Type_Prefixed_Prefix { + p := new(Type_Prefixed_Prefix) + *p = x + return p +} + +func (x Type_Prefixed_Prefix) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Type_Prefixed_Prefix) Descriptor() protoreflect.EnumDescriptor { + return file_buf_compiler_v1_ast_proto_enumTypes[6].Descriptor() +} + +func (Type_Prefixed_Prefix) Type() protoreflect.EnumType { + return &file_buf_compiler_v1_ast_proto_enumTypes[6] +} + +func (x Type_Prefixed_Prefix) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Type_Prefixed_Prefix.Descriptor instead. +func (Type_Prefixed_Prefix) EnumDescriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{7, 0, 0} +} + +// A parsed AST file. This is the root file for the whole Protocompile AST. +type File struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The original filesystem file this file was parsed from. + File *Report_File `protobuf:"bytes,1,opt,name=file,proto3" json:"file,omitempty"` + // Declararations in this file. + Decls []*Decl `protobuf:"bytes,2,rep,name=decls,proto3" json:"decls,omitempty"` +} + +func (x *File) Reset() { + *x = File{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *File) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*File) ProtoMessage() {} + +func (x *File) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use File.ProtoReflect.Descriptor instead. +func (*File) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{0} +} + +func (x *File) GetFile() *Report_File { + if x != nil { + return x.File + } + return nil +} + +func (x *File) GetDecls() []*Decl { + if x != nil { + return x.Decls + } + return nil +} + +// A source code span for a specific `File`. +// +// This only contains byte offsets for the span; all other information +// (such as the line number) should be re-computed as needed. 
+type Span struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Start uint32 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"` + End uint32 `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` +} + +func (x *Span) Reset() { + *x = Span{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Span) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Span) ProtoMessage() {} + +func (x *Span) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Span.ProtoReflect.Descriptor instead. +func (*Span) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{1} +} + +func (x *Span) GetStart() uint32 { + if x != nil { + return x.Start + } + return 0 +} + +func (x *Span) GetEnd() uint32 { + if x != nil { + return x.End + } + return 0 +} + +// A path in a Protobuf file. +// +// Protobuf paths are quite complex: they are not just dot-separated squences +// of names: each component can itself be a Protobuf path, such as +// `foo.(bar.(baz)).zing`. Although deep nesting is not currently used in +// the language, this AST supports it. +type Path struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Components []*Path_Component `protobuf:"bytes,1,rep,name=components,proto3" json:"components,omitempty"` + // The span for the whole path. + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` +} + +func (x *Path) Reset() { + *x = Path{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Path) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Path) ProtoMessage() {} + +func (x *Path) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Path.ProtoReflect.Descriptor instead. +func (*Path) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{2} +} + +func (x *Path) GetComponents() []*Path_Component { + if x != nil { + return x.Components + } + return nil +} + +func (x *Path) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +// A declaration in a Protobuf file. 
+type Decl struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Decl: + // + // *Decl_Empty_ + // *Decl_Syntax_ + // *Decl_Import_ + // *Decl_Package_ + // *Decl_Def + // *Decl_Body_ + // *Decl_Range_ + Decl isDecl_Decl `protobuf_oneof:"decl"` +} + +func (x *Decl) Reset() { + *x = Decl{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Decl) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Decl) ProtoMessage() {} + +func (x *Decl) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Decl.ProtoReflect.Descriptor instead. +func (*Decl) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3} +} + +func (m *Decl) GetDecl() isDecl_Decl { + if m != nil { + return m.Decl + } + return nil +} + +func (x *Decl) GetEmpty() *Decl_Empty { + if x, ok := x.GetDecl().(*Decl_Empty_); ok { + return x.Empty + } + return nil +} + +func (x *Decl) GetSyntax() *Decl_Syntax { + if x, ok := x.GetDecl().(*Decl_Syntax_); ok { + return x.Syntax + } + return nil +} + +func (x *Decl) GetImport() *Decl_Import { + if x, ok := x.GetDecl().(*Decl_Import_); ok { + return x.Import + } + return nil +} + +func (x *Decl) GetPackage() *Decl_Package { + if x, ok := x.GetDecl().(*Decl_Package_); ok { + return x.Package + } + return nil +} + +func (x *Decl) GetDef() *Def { + if x, ok := x.GetDecl().(*Decl_Def); ok { + return x.Def + } + return nil +} + +func (x *Decl) GetBody() *Decl_Body { + if x, ok := x.GetDecl().(*Decl_Body_); ok { + return x.Body + } + return nil +} + +func (x *Decl) GetRange() *Decl_Range { + if x, ok := x.GetDecl().(*Decl_Range_); ok { + return x.Range + } + return nil +} + +type isDecl_Decl interface { + isDecl_Decl() +} + +type Decl_Empty_ struct { + Empty *Decl_Empty `protobuf:"bytes,1,opt,name=empty,proto3,oneof"` +} + +type Decl_Syntax_ struct { + Syntax *Decl_Syntax `protobuf:"bytes,2,opt,name=syntax,proto3,oneof"` +} + +type Decl_Import_ struct { + Import *Decl_Import `protobuf:"bytes,3,opt,name=import,proto3,oneof"` +} + +type Decl_Package_ struct { + Package *Decl_Package `protobuf:"bytes,4,opt,name=package,proto3,oneof"` +} + +type Decl_Def struct { + Def *Def `protobuf:"bytes,5,opt,name=def,proto3,oneof"` +} + +type Decl_Body_ struct { + Body *Decl_Body `protobuf:"bytes,6,opt,name=body,proto3,oneof"` +} + +type Decl_Range_ struct { + Range *Decl_Range `protobuf:"bytes,7,opt,name=range,proto3,oneof"` +} + +func (*Decl_Empty_) isDecl_Decl() {} + +func (*Decl_Syntax_) isDecl_Decl() {} + +func (*Decl_Import_) isDecl_Decl() {} + +func (*Decl_Package_) isDecl_Decl() {} + +func (*Decl_Def) isDecl_Decl() {} + +func (*Decl_Body_) isDecl_Decl() {} + +func (*Decl_Range_) isDecl_Decl() {} + +// A definition is a particular kind of declaration that combines the syntactic +// elements of type definitions, fields, options, and service methods. +// +// This allows the parser to accept and represent many invalid but plausible productions. 
+type Def struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Definitions without a clear kind may be marked as `KIND_UNSPECIFIED`.
+	Kind Def_Kind `protobuf:"varint,1,opt,name=kind,proto3,enum=buf.compiler.v1.Def_Kind" json:"kind,omitempty"`
+	Name *Path    `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
+	// The type for a `KIND_FIELD` definition.
+	Type      *Type          `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"`
+	Signature *Def_Signature `protobuf:"bytes,4,opt,name=signature,proto3" json:"signature,omitempty"`
+	// This is the tag number of `KIND_FIELD` or `KIND_ENUM_VALUE`,
+	// or the value of `KIND_OPTION`.
+	Value *Expr `protobuf:"bytes,5,opt,name=value,proto3" json:"value,omitempty"`
+	// These are options appearing in `[...]`, such as on `KIND_FIELD`
+	// or `KIND_GROUP`. This will NOT include options on a oneof, since
+	// those are represented as `KIND_OPTION` `Def`s in `body`.
+	Options *Options `protobuf:"bytes,6,opt,name=options,proto3" json:"options,omitempty"`
+	// This is a braced body at the end of the definition.
+	Body          *Decl_Body `protobuf:"bytes,7,opt,name=body,proto3" json:"body,omitempty"`
+	Span          *Span      `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"`
+	KeywordSpan   *Span      `protobuf:"bytes,11,opt,name=keyword_span,json=keywordSpan,proto3" json:"keyword_span,omitempty"`
+	EqualsSpan    *Span      `protobuf:"bytes,12,opt,name=equals_span,json=equalsSpan,proto3" json:"equals_span,omitempty"`
+	SemicolonSpan *Span      `protobuf:"bytes,13,opt,name=semicolon_span,json=semicolonSpan,proto3" json:"semicolon_span,omitempty"`
+}
+
+func (x *Def) Reset() {
+	*x = Def{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_buf_compiler_v1_ast_proto_msgTypes[4]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Def) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Def) ProtoMessage() {}
+
+func (x *Def) ProtoReflect() protoreflect.Message {
+	mi := &file_buf_compiler_v1_ast_proto_msgTypes[4]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Def.ProtoReflect.Descriptor instead.
+func (*Def) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{4} +} + +func (x *Def) GetKind() Def_Kind { + if x != nil { + return x.Kind + } + return Def_KIND_UNSPECIFIED +} + +func (x *Def) GetName() *Path { + if x != nil { + return x.Name + } + return nil +} + +func (x *Def) GetType() *Type { + if x != nil { + return x.Type + } + return nil +} + +func (x *Def) GetSignature() *Def_Signature { + if x != nil { + return x.Signature + } + return nil +} + +func (x *Def) GetValue() *Expr { + if x != nil { + return x.Value + } + return nil +} + +func (x *Def) GetOptions() *Options { + if x != nil { + return x.Options + } + return nil +} + +func (x *Def) GetBody() *Decl_Body { + if x != nil { + return x.Body + } + return nil +} + +func (x *Def) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Def) GetKeywordSpan() *Span { + if x != nil { + return x.KeywordSpan + } + return nil +} + +func (x *Def) GetEqualsSpan() *Span { + if x != nil { + return x.EqualsSpan + } + return nil +} + +func (x *Def) GetSemicolonSpan() *Span { + if x != nil { + return x.SemicolonSpan + } + return nil +} + +// Compact options after a declaration, in `[...]`. +type Options struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Entries []*Options_Entry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` +} + +func (x *Options) Reset() { + *x = Options{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Options) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Options) ProtoMessage() {} + +func (x *Options) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Options.ProtoReflect.Descriptor instead. +func (*Options) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{5} +} + +func (x *Options) GetEntries() []*Options_Entry { + if x != nil { + return x.Entries + } + return nil +} + +func (x *Options) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +// An expression, such as the value of an option or the tag of a field. 
+type Expr struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Expr: + // + // *Expr_Literal_ + // *Expr_Path + // *Expr_Prefixed_ + // *Expr_Range_ + // *Expr_Array_ + // *Expr_Dict_ + // *Expr_Kv_ + Expr isExpr_Expr `protobuf_oneof:"expr"` +} + +func (x *Expr) Reset() { + *x = Expr{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Expr) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Expr) ProtoMessage() {} + +func (x *Expr) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Expr.ProtoReflect.Descriptor instead. +func (*Expr) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{6} +} + +func (m *Expr) GetExpr() isExpr_Expr { + if m != nil { + return m.Expr + } + return nil +} + +func (x *Expr) GetLiteral() *Expr_Literal { + if x, ok := x.GetExpr().(*Expr_Literal_); ok { + return x.Literal + } + return nil +} + +func (x *Expr) GetPath() *Path { + if x, ok := x.GetExpr().(*Expr_Path); ok { + return x.Path + } + return nil +} + +func (x *Expr) GetPrefixed() *Expr_Prefixed { + if x, ok := x.GetExpr().(*Expr_Prefixed_); ok { + return x.Prefixed + } + return nil +} + +func (x *Expr) GetRange() *Expr_Range { + if x, ok := x.GetExpr().(*Expr_Range_); ok { + return x.Range + } + return nil +} + +func (x *Expr) GetArray() *Expr_Array { + if x, ok := x.GetExpr().(*Expr_Array_); ok { + return x.Array + } + return nil +} + +func (x *Expr) GetDict() *Expr_Dict { + if x, ok := x.GetExpr().(*Expr_Dict_); ok { + return x.Dict + } + return nil +} + +func (x *Expr) GetKv() *Expr_Kv { + if x, ok := x.GetExpr().(*Expr_Kv_); ok { + return x.Kv + } + return nil +} + +type isExpr_Expr interface { + isExpr_Expr() +} + +type Expr_Literal_ struct { + Literal *Expr_Literal `protobuf:"bytes,1,opt,name=literal,proto3,oneof"` +} + +type Expr_Path struct { + Path *Path `protobuf:"bytes,2,opt,name=path,proto3,oneof"` +} + +type Expr_Prefixed_ struct { + Prefixed *Expr_Prefixed `protobuf:"bytes,3,opt,name=prefixed,proto3,oneof"` +} + +type Expr_Range_ struct { + Range *Expr_Range `protobuf:"bytes,4,opt,name=range,proto3,oneof"` +} + +type Expr_Array_ struct { + Array *Expr_Array `protobuf:"bytes,5,opt,name=array,proto3,oneof"` +} + +type Expr_Dict_ struct { + Dict *Expr_Dict `protobuf:"bytes,6,opt,name=dict,proto3,oneof"` +} + +type Expr_Kv_ struct { + Kv *Expr_Kv `protobuf:"bytes,7,opt,name=kv,proto3,oneof"` +} + +func (*Expr_Literal_) isExpr_Expr() {} + +func (*Expr_Path) isExpr_Expr() {} + +func (*Expr_Prefixed_) isExpr_Expr() {} + +func (*Expr_Range_) isExpr_Expr() {} + +func (*Expr_Array_) isExpr_Expr() {} + +func (*Expr_Dict_) isExpr_Expr() {} + +func (*Expr_Kv_) isExpr_Expr() {} + +// A type, such as the prefix of a field. +// +// This AST includes many types not present in ordinary Protobuf, such as representations +// for `repeated repeated int32` and `Arbitrary`, among others. 
+type Type struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Type: + // + // *Type_Path + // *Type_Prefixed_ + // *Type_Generic_ + Type isType_Type `protobuf_oneof:"type"` +} + +func (x *Type) Reset() { + *x = Type{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Type) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Type) ProtoMessage() {} + +func (x *Type) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Type.ProtoReflect.Descriptor instead. +func (*Type) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{7} +} + +func (m *Type) GetType() isType_Type { + if m != nil { + return m.Type + } + return nil +} + +func (x *Type) GetPath() *Path { + if x, ok := x.GetType().(*Type_Path); ok { + return x.Path + } + return nil +} + +func (x *Type) GetPrefixed() *Type_Prefixed { + if x, ok := x.GetType().(*Type_Prefixed_); ok { + return x.Prefixed + } + return nil +} + +func (x *Type) GetGeneric() *Type_Generic { + if x, ok := x.GetType().(*Type_Generic_); ok { + return x.Generic + } + return nil +} + +type isType_Type interface { + isType_Type() +} + +type Type_Path struct { + Path *Path `protobuf:"bytes,1,opt,name=path,proto3,oneof"` +} + +type Type_Prefixed_ struct { + Prefixed *Type_Prefixed `protobuf:"bytes,2,opt,name=prefixed,proto3,oneof"` +} + +type Type_Generic_ struct { + Generic *Type_Generic `protobuf:"bytes,3,opt,name=generic,proto3,oneof"` +} + +func (*Type_Path) isType_Type() {} + +func (*Type_Prefixed_) isType_Type() {} + +func (*Type_Generic_) isType_Type() {} + +// A path component. +type Path_Component struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // May be missing altogehter, for invalid paths like `foo..bar`. + // + // Types that are assignable to Component: + // + // *Path_Component_Ident + // *Path_Component_Extension + Component isPath_Component_Component `protobuf_oneof:"component"` + // The type of separator this component had before it. + // If this is SEPARATOR_UNSPECIFIED, this is the first + // component, and the path is not absolute. + Separator Path_Component_Separator `protobuf:"varint,3,opt,name=separator,proto3,enum=buf.compiler.v1.Path_Component_Separator" json:"separator,omitempty"` + // The span of the component's value. + ComponentSpan *Span `protobuf:"bytes,10,opt,name=component_span,json=componentSpan,proto3" json:"component_span,omitempty"` + // The span of this component's leading dot, if any. 
+ SeparatorSpan *Span `protobuf:"bytes,11,opt,name=separator_span,json=separatorSpan,proto3" json:"separator_span,omitempty"` +} + +func (x *Path_Component) Reset() { + *x = Path_Component{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Path_Component) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Path_Component) ProtoMessage() {} + +func (x *Path_Component) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Path_Component.ProtoReflect.Descriptor instead. +func (*Path_Component) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{2, 0} +} + +func (m *Path_Component) GetComponent() isPath_Component_Component { + if m != nil { + return m.Component + } + return nil +} + +func (x *Path_Component) GetIdent() string { + if x, ok := x.GetComponent().(*Path_Component_Ident); ok { + return x.Ident + } + return "" +} + +func (x *Path_Component) GetExtension() *Path { + if x, ok := x.GetComponent().(*Path_Component_Extension); ok { + return x.Extension + } + return nil +} + +func (x *Path_Component) GetSeparator() Path_Component_Separator { + if x != nil { + return x.Separator + } + return Path_Component_SEPARATOR_UNSPECIFIED +} + +func (x *Path_Component) GetComponentSpan() *Span { + if x != nil { + return x.ComponentSpan + } + return nil +} + +func (x *Path_Component) GetSeparatorSpan() *Span { + if x != nil { + return x.SeparatorSpan + } + return nil +} + +type isPath_Component_Component interface { + isPath_Component_Component() +} + +type Path_Component_Ident struct { + // A single identifier. + Ident string `protobuf:"bytes,1,opt,name=ident,proto3,oneof"` +} + +type Path_Component_Extension struct { + // A nested extension path. + Extension *Path `protobuf:"bytes,2,opt,name=extension,proto3,oneof"` +} + +func (*Path_Component_Ident) isPath_Component_Component() {} + +func (*Path_Component_Extension) isPath_Component_Component() {} + +// An empty declaration. +type Decl_Empty struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` +} + +func (x *Decl_Empty) Reset() { + *x = Decl_Empty{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Decl_Empty) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Decl_Empty) ProtoMessage() {} + +func (x *Decl_Empty) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Decl_Empty.ProtoReflect.Descriptor instead. 
+func (*Decl_Empty) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3, 0} +} + +func (x *Decl_Empty) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +// A language pragma, such as a syntax or edition declaration. +type Decl_Syntax struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Kind Decl_Syntax_Kind `protobuf:"varint,1,opt,name=kind,proto3,enum=buf.compiler.v1.Decl_Syntax_Kind" json:"kind,omitempty"` + Value *Expr `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + Options *Options `protobuf:"bytes,3,opt,name=options,proto3" json:"options,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + KeywordSpan *Span `protobuf:"bytes,11,opt,name=keyword_span,json=keywordSpan,proto3" json:"keyword_span,omitempty"` + EqualsSpan *Span `protobuf:"bytes,12,opt,name=equals_span,json=equalsSpan,proto3" json:"equals_span,omitempty"` + SemicolonSpan *Span `protobuf:"bytes,13,opt,name=semicolon_span,json=semicolonSpan,proto3" json:"semicolon_span,omitempty"` +} + +func (x *Decl_Syntax) Reset() { + *x = Decl_Syntax{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Decl_Syntax) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Decl_Syntax) ProtoMessage() {} + +func (x *Decl_Syntax) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Decl_Syntax.ProtoReflect.Descriptor instead. +func (*Decl_Syntax) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3, 1} +} + +func (x *Decl_Syntax) GetKind() Decl_Syntax_Kind { + if x != nil { + return x.Kind + } + return Decl_Syntax_KIND_UNSPECIFIED +} + +func (x *Decl_Syntax) GetValue() *Expr { + if x != nil { + return x.Value + } + return nil +} + +func (x *Decl_Syntax) GetOptions() *Options { + if x != nil { + return x.Options + } + return nil +} + +func (x *Decl_Syntax) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Decl_Syntax) GetKeywordSpan() *Span { + if x != nil { + return x.KeywordSpan + } + return nil +} + +func (x *Decl_Syntax) GetEqualsSpan() *Span { + if x != nil { + return x.EqualsSpan + } + return nil +} + +func (x *Decl_Syntax) GetSemicolonSpan() *Span { + if x != nil { + return x.SemicolonSpan + } + return nil +} + +// A package declaration. 
+type Decl_Package struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Path *Path `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + Options *Options `protobuf:"bytes,2,opt,name=options,proto3" json:"options,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + KeywordSpan *Span `protobuf:"bytes,11,opt,name=keyword_span,json=keywordSpan,proto3" json:"keyword_span,omitempty"` + SemicolonSpan *Span `protobuf:"bytes,12,opt,name=semicolon_span,json=semicolonSpan,proto3" json:"semicolon_span,omitempty"` +} + +func (x *Decl_Package) Reset() { + *x = Decl_Package{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Decl_Package) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Decl_Package) ProtoMessage() {} + +func (x *Decl_Package) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Decl_Package.ProtoReflect.Descriptor instead. +func (*Decl_Package) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3, 2} +} + +func (x *Decl_Package) GetPath() *Path { + if x != nil { + return x.Path + } + return nil +} + +func (x *Decl_Package) GetOptions() *Options { + if x != nil { + return x.Options + } + return nil +} + +func (x *Decl_Package) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Decl_Package) GetKeywordSpan() *Span { + if x != nil { + return x.KeywordSpan + } + return nil +} + +func (x *Decl_Package) GetSemicolonSpan() *Span { + if x != nil { + return x.SemicolonSpan + } + return nil +} + +// An import declaration. 
+type Decl_Import struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Modifier Decl_Import_Modifier `protobuf:"varint,1,opt,name=modifier,proto3,enum=buf.compiler.v1.Decl_Import_Modifier" json:"modifier,omitempty"` + ImportPath *Expr `protobuf:"bytes,2,opt,name=import_path,json=importPath,proto3" json:"import_path,omitempty"` + Options *Options `protobuf:"bytes,3,opt,name=options,proto3" json:"options,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + KeywordSpan *Span `protobuf:"bytes,11,opt,name=keyword_span,json=keywordSpan,proto3" json:"keyword_span,omitempty"` + ModifierSpan *Span `protobuf:"bytes,12,opt,name=modifier_span,json=modifierSpan,proto3" json:"modifier_span,omitempty"` + ImportPathSpan *Span `protobuf:"bytes,13,opt,name=import_path_span,json=importPathSpan,proto3" json:"import_path_span,omitempty"` + SemicolonSpan *Span `protobuf:"bytes,14,opt,name=semicolon_span,json=semicolonSpan,proto3" json:"semicolon_span,omitempty"` +} + +func (x *Decl_Import) Reset() { + *x = Decl_Import{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Decl_Import) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Decl_Import) ProtoMessage() {} + +func (x *Decl_Import) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Decl_Import.ProtoReflect.Descriptor instead. +func (*Decl_Import) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3, 3} +} + +func (x *Decl_Import) GetModifier() Decl_Import_Modifier { + if x != nil { + return x.Modifier + } + return Decl_Import_MODIFIER_UNSPECIFIED +} + +func (x *Decl_Import) GetImportPath() *Expr { + if x != nil { + return x.ImportPath + } + return nil +} + +func (x *Decl_Import) GetOptions() *Options { + if x != nil { + return x.Options + } + return nil +} + +func (x *Decl_Import) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Decl_Import) GetKeywordSpan() *Span { + if x != nil { + return x.KeywordSpan + } + return nil +} + +func (x *Decl_Import) GetModifierSpan() *Span { + if x != nil { + return x.ModifierSpan + } + return nil +} + +func (x *Decl_Import) GetImportPathSpan() *Span { + if x != nil { + return x.ImportPathSpan + } + return nil +} + +func (x *Decl_Import) GetSemicolonSpan() *Span { + if x != nil { + return x.SemicolonSpan + } + return nil +} + +// The body of a message, enum, or similar declaration, which +// itself contains declarations. 
+type Decl_Body struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Decls []*Decl `protobuf:"bytes,1,rep,name=decls,proto3" json:"decls,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` +} + +func (x *Decl_Body) Reset() { + *x = Decl_Body{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Decl_Body) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Decl_Body) ProtoMessage() {} + +func (x *Decl_Body) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Decl_Body.ProtoReflect.Descriptor instead. +func (*Decl_Body) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3, 4} +} + +func (x *Decl_Body) GetDecls() []*Decl { + if x != nil { + return x.Decls + } + return nil +} + +func (x *Decl_Body) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +// An extensions or reserved range within a message. Both productions are +// extremely similar, so they share an AST node. +type Decl_Range struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Kind Decl_Range_Kind `protobuf:"varint,1,opt,name=kind,proto3,enum=buf.compiler.v1.Decl_Range_Kind" json:"kind,omitempty"` + Ranges []*Expr `protobuf:"bytes,2,rep,name=ranges,proto3" json:"ranges,omitempty"` + Options *Options `protobuf:"bytes,3,opt,name=options,proto3" json:"options,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + KeywordSpan *Span `protobuf:"bytes,11,opt,name=keyword_span,json=keywordSpan,proto3" json:"keyword_span,omitempty"` + SemicolonSpan *Span `protobuf:"bytes,12,opt,name=semicolon_span,json=semicolonSpan,proto3" json:"semicolon_span,omitempty"` +} + +func (x *Decl_Range) Reset() { + *x = Decl_Range{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Decl_Range) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Decl_Range) ProtoMessage() {} + +func (x *Decl_Range) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Decl_Range.ProtoReflect.Descriptor instead. 
+func (*Decl_Range) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{3, 5} +} + +func (x *Decl_Range) GetKind() Decl_Range_Kind { + if x != nil { + return x.Kind + } + return Decl_Range_KIND_UNSPECIFIED +} + +func (x *Decl_Range) GetRanges() []*Expr { + if x != nil { + return x.Ranges + } + return nil +} + +func (x *Decl_Range) GetOptions() *Options { + if x != nil { + return x.Options + } + return nil +} + +func (x *Decl_Range) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Decl_Range) GetKeywordSpan() *Span { + if x != nil { + return x.KeywordSpan + } + return nil +} + +func (x *Decl_Range) GetSemicolonSpan() *Span { + if x != nil { + return x.SemicolonSpan + } + return nil +} + +// A method signature. This appears on `KIND_METHOD`, for example. +type Def_Signature struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Inputs []*Type `protobuf:"bytes,1,rep,name=inputs,proto3" json:"inputs,omitempty"` + Outputs []*Type `protobuf:"bytes,2,rep,name=outputs,proto3" json:"outputs,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + InputSpan *Span `protobuf:"bytes,11,opt,name=input_span,json=inputSpan,proto3" json:"input_span,omitempty"` + ReturnsSpan *Span `protobuf:"bytes,12,opt,name=returns_span,json=returnsSpan,proto3" json:"returns_span,omitempty"` + OutputSpan *Span `protobuf:"bytes,13,opt,name=output_span,json=outputSpan,proto3" json:"output_span,omitempty"` +} + +func (x *Def_Signature) Reset() { + *x = Def_Signature{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Def_Signature) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Def_Signature) ProtoMessage() {} + +func (x *Def_Signature) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Def_Signature.ProtoReflect.Descriptor instead. 
+func (*Def_Signature) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{4, 0} +} + +func (x *Def_Signature) GetInputs() []*Type { + if x != nil { + return x.Inputs + } + return nil +} + +func (x *Def_Signature) GetOutputs() []*Type { + if x != nil { + return x.Outputs + } + return nil +} + +func (x *Def_Signature) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Def_Signature) GetInputSpan() *Span { + if x != nil { + return x.InputSpan + } + return nil +} + +func (x *Def_Signature) GetReturnsSpan() *Span { + if x != nil { + return x.ReturnsSpan + } + return nil +} + +func (x *Def_Signature) GetOutputSpan() *Span { + if x != nil { + return x.OutputSpan + } + return nil +} + +type Options_Entry struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Path *Path `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + Value *Expr `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + EqualsSpan *Span `protobuf:"bytes,10,opt,name=equals_span,json=equalsSpan,proto3" json:"equals_span,omitempty"` +} + +func (x *Options_Entry) Reset() { + *x = Options_Entry{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Options_Entry) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Options_Entry) ProtoMessage() {} + +func (x *Options_Entry) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Options_Entry.ProtoReflect.Descriptor instead. +func (*Options_Entry) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{5, 0} +} + +func (x *Options_Entry) GetPath() *Path { + if x != nil { + return x.Path + } + return nil +} + +func (x *Options_Entry) GetValue() *Expr { + if x != nil { + return x.Value + } + return nil +} + +func (x *Options_Entry) GetEqualsSpan() *Span { + if x != nil { + return x.EqualsSpan + } + return nil +} + +// A literal value: a number or a string. +type Expr_Literal struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // None of these may be set, in the case of an integer with an invalid or + // out-of-range format. 
+ // + // Types that are assignable to Value: + // + // *Expr_Literal_IntValue + // *Expr_Literal_FloatValue + // *Expr_Literal_StringValue + Value isExpr_Literal_Value `protobuf_oneof:"value"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` +} + +func (x *Expr_Literal) Reset() { + *x = Expr_Literal{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Expr_Literal) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Expr_Literal) ProtoMessage() {} + +func (x *Expr_Literal) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Expr_Literal.ProtoReflect.Descriptor instead. +func (*Expr_Literal) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{6, 0} +} + +func (m *Expr_Literal) GetValue() isExpr_Literal_Value { + if m != nil { + return m.Value + } + return nil +} + +func (x *Expr_Literal) GetIntValue() uint64 { + if x, ok := x.GetValue().(*Expr_Literal_IntValue); ok { + return x.IntValue + } + return 0 +} + +func (x *Expr_Literal) GetFloatValue() float64 { + if x, ok := x.GetValue().(*Expr_Literal_FloatValue); ok { + return x.FloatValue + } + return 0 +} + +func (x *Expr_Literal) GetStringValue() string { + if x, ok := x.GetValue().(*Expr_Literal_StringValue); ok { + return x.StringValue + } + return "" +} + +func (x *Expr_Literal) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +type isExpr_Literal_Value interface { + isExpr_Literal_Value() +} + +type Expr_Literal_IntValue struct { + IntValue uint64 `protobuf:"varint,1,opt,name=int_value,json=intValue,proto3,oneof"` +} + +type Expr_Literal_FloatValue struct { + FloatValue float64 `protobuf:"fixed64,2,opt,name=float_value,json=floatValue,proto3,oneof"` +} + +type Expr_Literal_StringValue struct { + StringValue string `protobuf:"bytes,3,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +func (*Expr_Literal_IntValue) isExpr_Literal_Value() {} + +func (*Expr_Literal_FloatValue) isExpr_Literal_Value() {} + +func (*Expr_Literal_StringValue) isExpr_Literal_Value() {} + +// An expression with some kind of prefix, such as a minus sign. 
+type Expr_Prefixed struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Prefix Expr_Prefixed_Prefix `protobuf:"varint,1,opt,name=prefix,proto3,enum=buf.compiler.v1.Expr_Prefixed_Prefix" json:"prefix,omitempty"` + Expr *Expr `protobuf:"bytes,2,opt,name=expr,proto3" json:"expr,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + PrefixSpan *Span `protobuf:"bytes,11,opt,name=prefix_span,json=prefixSpan,proto3" json:"prefix_span,omitempty"` +} + +func (x *Expr_Prefixed) Reset() { + *x = Expr_Prefixed{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Expr_Prefixed) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Expr_Prefixed) ProtoMessage() {} + +func (x *Expr_Prefixed) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Expr_Prefixed.ProtoReflect.Descriptor instead. +func (*Expr_Prefixed) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{6, 1} +} + +func (x *Expr_Prefixed) GetPrefix() Expr_Prefixed_Prefix { + if x != nil { + return x.Prefix + } + return Expr_Prefixed_PREFIX_UNSPECIFIED +} + +func (x *Expr_Prefixed) GetExpr() *Expr { + if x != nil { + return x.Expr + } + return nil +} + +func (x *Expr_Prefixed) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Expr_Prefixed) GetPrefixSpan() *Span { + if x != nil { + return x.PrefixSpan + } + return nil +} + +// A range expression, i.e. something like `1 to 10`. The `1 to max` is not +// speicial syntax; `max` is realized as a path expression. +// +// Ranges are inclusive. +type Expr_Range struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Start *Expr `protobuf:"bytes,1,opt,name=start,proto3" json:"start,omitempty"` + End *Expr `protobuf:"bytes,2,opt,name=end,proto3" json:"end,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + ToSpan *Span `protobuf:"bytes,11,opt,name=to_span,json=toSpan,proto3" json:"to_span,omitempty"` +} + +func (x *Expr_Range) Reset() { + *x = Expr_Range{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Expr_Range) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Expr_Range) ProtoMessage() {} + +func (x *Expr_Range) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Expr_Range.ProtoReflect.Descriptor instead. 
+func (*Expr_Range) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{6, 2} +} + +func (x *Expr_Range) GetStart() *Expr { + if x != nil { + return x.Start + } + return nil +} + +func (x *Expr_Range) GetEnd() *Expr { + if x != nil { + return x.End + } + return nil +} + +func (x *Expr_Range) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Expr_Range) GetToSpan() *Span { + if x != nil { + return x.ToSpan + } + return nil +} + +// An array literal, a sequence of expressions bound by square brackets. +type Expr_Array struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Elements []*Expr `protobuf:"bytes,1,rep,name=elements,proto3" json:"elements,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` +} + +func (x *Expr_Array) Reset() { + *x = Expr_Array{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Expr_Array) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Expr_Array) ProtoMessage() {} + +func (x *Expr_Array) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Expr_Array.ProtoReflect.Descriptor instead. +func (*Expr_Array) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{6, 3} +} + +func (x *Expr_Array) GetElements() []*Expr { + if x != nil { + return x.Elements + } + return nil +} + +func (x *Expr_Array) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +// A dictionary literal, a sequence of key-value pairs bound by curly braces. +type Expr_Dict struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Entries []*Expr_Kv `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` +} + +func (x *Expr_Dict) Reset() { + *x = Expr_Dict{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Expr_Dict) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Expr_Dict) ProtoMessage() {} + +func (x *Expr_Dict) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Expr_Dict.ProtoReflect.Descriptor instead. +func (*Expr_Dict) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{6, 4} +} + +func (x *Expr_Dict) GetEntries() []*Expr_Kv { + if x != nil { + return x.Entries + } + return nil +} + +func (x *Expr_Dict) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +// A key-value pair expression, which usually will appear inside of an +// `Expr.Dict`. 
+type Expr_Kv struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Key *Expr `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Value *Expr `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + ColonSpan *Span `protobuf:"bytes,11,opt,name=colon_span,json=colonSpan,proto3" json:"colon_span,omitempty"` +} + +func (x *Expr_Kv) Reset() { + *x = Expr_Kv{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Expr_Kv) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Expr_Kv) ProtoMessage() {} + +func (x *Expr_Kv) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Expr_Kv.ProtoReflect.Descriptor instead. +func (*Expr_Kv) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{6, 5} +} + +func (x *Expr_Kv) GetKey() *Expr { + if x != nil { + return x.Key + } + return nil +} + +func (x *Expr_Kv) GetValue() *Expr { + if x != nil { + return x.Value + } + return nil +} + +func (x *Expr_Kv) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Expr_Kv) GetColonSpan() *Span { + if x != nil { + return x.ColonSpan + } + return nil +} + +// A type with a modifier prefix in front of it, such as `repeated` or `stream`. +type Type_Prefixed struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Prefix Type_Prefixed_Prefix `protobuf:"varint,1,opt,name=prefix,proto3,enum=buf.compiler.v1.Type_Prefixed_Prefix" json:"prefix,omitempty"` + Type *Type `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + PrefixSpan *Span `protobuf:"bytes,11,opt,name=prefix_span,json=prefixSpan,proto3" json:"prefix_span,omitempty"` +} + +func (x *Type_Prefixed) Reset() { + *x = Type_Prefixed{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Type_Prefixed) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Type_Prefixed) ProtoMessage() {} + +func (x *Type_Prefixed) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Type_Prefixed.ProtoReflect.Descriptor instead. 
+func (*Type_Prefixed) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{7, 0} +} + +func (x *Type_Prefixed) GetPrefix() Type_Prefixed_Prefix { + if x != nil { + return x.Prefix + } + return Type_Prefixed_PREFIX_UNSPECIFIED +} + +func (x *Type_Prefixed) GetType() *Type { + if x != nil { + return x.Type + } + return nil +} + +func (x *Type_Prefixed) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Type_Prefixed) GetPrefixSpan() *Span { + if x != nil { + return x.PrefixSpan + } + return nil +} + +// A type with generic arguments, such as `map`. +// +// Note that no other generic types are part of Protobuf, but we support arbitrary generic +// types since it is a more natural way to define the AST. +type Type_Generic struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Path *Path `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + Args []*Type `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty"` + Span *Span `protobuf:"bytes,10,opt,name=span,proto3" json:"span,omitempty"` + BracketSpan *Span `protobuf:"bytes,11,opt,name=bracket_span,json=bracketSpan,proto3" json:"bracket_span,omitempty"` +} + +func (x *Type_Generic) Reset() { + *x = Type_Generic{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Type_Generic) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Type_Generic) ProtoMessage() {} + +func (x *Type_Generic) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_ast_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Type_Generic.ProtoReflect.Descriptor instead. 
+func (*Type_Generic) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_ast_proto_rawDescGZIP(), []int{7, 1} +} + +func (x *Type_Generic) GetPath() *Path { + if x != nil { + return x.Path + } + return nil +} + +func (x *Type_Generic) GetArgs() []*Type { + if x != nil { + return x.Args + } + return nil +} + +func (x *Type_Generic) GetSpan() *Span { + if x != nil { + return x.Span + } + return nil +} + +func (x *Type_Generic) GetBracketSpan() *Span { + if x != nil { + return x.BracketSpan + } + return nil +} + +var File_buf_compiler_v1_ast_proto protoreflect.FileDescriptor + +var file_buf_compiler_v1_ast_proto_rawDesc = []byte{ + 0x0a, 0x19, 0x62, 0x75, 0x66, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2f, 0x76, + 0x31, 0x2f, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0f, 0x62, 0x75, 0x66, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x1a, 0x1c, 0x62, 0x75, + 0x66, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2f, 0x76, 0x31, 0x2f, 0x72, 0x65, + 0x70, 0x6f, 0x72, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x65, 0x0a, 0x04, 0x46, 0x69, + 0x6c, 0x65, 0x12, 0x30, 0x0a, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1c, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x04, + 0x66, 0x69, 0x6c, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x64, 0x65, 0x63, 0x6c, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x63, 0x6c, 0x52, 0x05, 0x64, 0x65, 0x63, 0x6c, + 0x73, 0x22, 0x2e, 0x0a, 0x04, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, + 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x65, 0x6e, + 0x64, 0x22, 0xf1, 0x03, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x3f, 0x0a, 0x0a, 0x63, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, + 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, + 0x2e, 0x50, 0x61, 0x74, 0x68, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x52, + 0x0a, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x73, + 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, + 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x1a, 0xfc, 0x02, 0x0a, 0x09, 0x43, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x12, 0x16, 0x0a, 0x05, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x12, 0x35, 0x0a, 0x09, + 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x31, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x48, 0x00, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x12, 0x47, 0x0a, 0x09, 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x29, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, 
0x74, 0x68, 0x2e, 0x43, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2e, 0x53, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, + 0x72, 0x52, 0x09, 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x3c, 0x0a, 0x0e, + 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0d, 0x63, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x3c, 0x0a, 0x0e, 0x73, 0x65, + 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0d, 0x73, 0x65, 0x70, 0x61, 0x72, + 0x61, 0x74, 0x6f, 0x72, 0x53, 0x70, 0x61, 0x6e, 0x22, 0x4e, 0x0a, 0x09, 0x53, 0x65, 0x70, 0x61, + 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x19, 0x0a, 0x15, 0x53, 0x45, 0x50, 0x41, 0x52, 0x41, 0x54, + 0x4f, 0x52, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, + 0x12, 0x11, 0x0a, 0x0d, 0x53, 0x45, 0x50, 0x41, 0x52, 0x41, 0x54, 0x4f, 0x52, 0x5f, 0x44, 0x4f, + 0x54, 0x10, 0x01, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x45, 0x50, 0x41, 0x52, 0x41, 0x54, 0x4f, 0x52, + 0x5f, 0x53, 0x4c, 0x41, 0x53, 0x48, 0x10, 0x02, 0x42, 0x0b, 0x0a, 0x09, 0x63, 0x6f, 0x6d, 0x70, + 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x22, 0x94, 0x11, 0x0a, 0x04, 0x44, 0x65, 0x63, 0x6c, 0x12, 0x33, + 0x0a, 0x05, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, + 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, + 0x44, 0x65, 0x63, 0x6c, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x48, 0x00, 0x52, 0x05, 0x65, 0x6d, + 0x70, 0x74, 0x79, 0x12, 0x36, 0x0a, 0x06, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x63, 0x6c, 0x2e, 0x53, 0x79, 0x6e, 0x74, 0x61, + 0x78, 0x48, 0x00, 0x52, 0x06, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x12, 0x36, 0x0a, 0x06, 0x69, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, + 0x63, 0x6c, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x06, 0x69, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x12, 0x39, 0x0a, 0x07, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x63, 0x6c, 0x2e, 0x50, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x48, 0x00, 0x52, 0x07, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x28, + 0x0a, 0x03, 0x64, 0x65, 0x66, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, + 0x66, 0x48, 0x00, 0x52, 0x03, 0x64, 0x65, 0x66, 0x12, 0x30, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x63, 0x6c, 0x2e, 0x42, 0x6f, + 0x64, 0x79, 0x48, 0x00, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, 0x33, 0x0a, 0x05, 0x72, 
0x61, + 0x6e, 0x67, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x62, 0x75, 0x66, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x63, 0x6c, + 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x1a, + 0x32, 0x0a, 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, + 0x70, 0x61, 0x6e, 0x1a, 0xbc, 0x03, 0x0a, 0x06, 0x53, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x12, 0x35, + 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x21, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, + 0x65, 0x63, 0x6c, 0x2e, 0x53, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x2e, 0x4b, 0x69, 0x6e, 0x64, 0x52, + 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x2b, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x12, 0x32, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, + 0x6e, 0x12, 0x38, 0x0a, 0x0c, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x73, 0x70, 0x61, + 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, + 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0b, + 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x36, 0x0a, 0x0b, 0x65, + 0x71, 0x75, 0x61, 0x6c, 0x73, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0a, 0x65, 0x71, 0x75, 0x61, 0x6c, 0x73, 0x53, + 0x70, 0x61, 0x6e, 0x12, 0x3c, 0x0a, 0x0e, 0x73, 0x65, 0x6d, 0x69, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, + 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, + 0x61, 0x6e, 0x52, 0x0d, 0x73, 0x65, 0x6d, 0x69, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, 0x53, 0x70, 0x61, + 0x6e, 0x22, 0x3f, 0x0a, 0x04, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x14, 0x0a, 0x10, 0x4b, 0x49, 0x4e, + 0x44, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, + 0x0f, 0x0a, 0x0b, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x53, 0x59, 0x4e, 0x54, 0x41, 0x58, 0x10, 0x01, + 0x12, 0x10, 0x0a, 0x0c, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x45, 0x44, 0x49, 0x54, 0x49, 0x4f, 0x4e, + 0x10, 0x02, 0x1a, 0x8b, 0x02, 0x0a, 0x07, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x29, + 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 
0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x50, + 0x61, 0x74, 0x68, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x32, 0x0a, 0x07, 0x6f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, 0x75, 0x66, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x29, 0x0a, + 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, + 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x38, 0x0a, 0x0c, 0x6b, 0x65, 0x79, 0x77, + 0x6f, 0x72, 0x64, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, + 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0b, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x53, 0x70, + 0x61, 0x6e, 0x12, 0x3c, 0x0a, 0x0e, 0x73, 0x65, 0x6d, 0x69, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, 0x5f, + 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, + 0x6e, 0x52, 0x0d, 0x73, 0x65, 0x6d, 0x69, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, 0x53, 0x70, 0x61, 0x6e, + 0x1a, 0xa5, 0x04, 0x0a, 0x06, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x41, 0x0a, 0x08, 0x6d, + 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, + 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, + 0x44, 0x65, 0x63, 0x6c, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x2e, 0x4d, 0x6f, 0x64, 0x69, + 0x66, 0x69, 0x65, 0x72, 0x52, 0x08, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x36, + 0x0a, 0x0b, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x52, 0x0a, 0x69, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x50, 0x61, 0x74, 0x68, 0x12, 0x32, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, + 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, + 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, + 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x38, 0x0a, 0x0c, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, + 0x61, 0x6e, 0x52, 0x0b, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x53, 0x70, 0x61, 0x6e, 0x12, + 0x3a, 0x0a, 0x0d, 0x6d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x5f, 0x73, 0x70, 0x61, 0x6e, + 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0c, 0x6d, + 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 
0x53, 0x70, 0x61, 0x6e, 0x12, 0x3f, 0x0a, 0x10, 0x69, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, + 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, + 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0e, 0x69, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x50, 0x61, 0x74, 0x68, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x3c, 0x0a, 0x0e, + 0x73, 0x65, 0x6d, 0x69, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0e, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0d, 0x73, 0x65, 0x6d, + 0x69, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, 0x53, 0x70, 0x61, 0x6e, 0x22, 0x4c, 0x0a, 0x08, 0x4d, 0x6f, + 0x64, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x14, 0x4d, 0x4f, 0x44, 0x49, 0x46, 0x49, + 0x45, 0x52, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, + 0x12, 0x11, 0x0a, 0x0d, 0x4d, 0x4f, 0x44, 0x49, 0x46, 0x49, 0x45, 0x52, 0x5f, 0x57, 0x45, 0x41, + 0x4b, 0x10, 0x01, 0x12, 0x13, 0x0a, 0x0f, 0x4d, 0x4f, 0x44, 0x49, 0x46, 0x49, 0x45, 0x52, 0x5f, + 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x1a, 0x5e, 0x0a, 0x04, 0x42, 0x6f, 0x64, 0x79, + 0x12, 0x2b, 0x0a, 0x05, 0x64, 0x65, 0x63, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x31, 0x2e, 0x44, 0x65, 0x63, 0x6c, 0x52, 0x05, 0x64, 0x65, 0x63, 0x6c, 0x73, 0x12, 0x29, 0x0a, + 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, + 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x1a, 0x89, 0x03, 0x0a, 0x05, 0x52, 0x61, 0x6e, + 0x67, 0x65, 0x12, 0x34, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x20, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x31, 0x2e, 0x44, 0x65, 0x63, 0x6c, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x4b, 0x69, + 0x6e, 0x64, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x2d, 0x0a, 0x06, 0x72, 0x61, 0x6e, 0x67, + 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x52, + 0x06, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x73, + 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, + 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x38, 0x0a, 0x0c, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, + 0x64, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x70, 0x61, 0x6e, 0x52, 0x0b, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x53, 0x70, 0x61, 0x6e, + 0x12, 0x3c, 0x0a, 0x0e, 0x73, 0x65, 0x6d, 0x69, 0x63, 0x6f, 0x6c, 
0x6f, 0x6e, 0x5f, 0x73, 0x70, + 0x61, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, + 0x0d, 0x73, 0x65, 0x6d, 0x69, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, 0x53, 0x70, 0x61, 0x6e, 0x22, 0x44, + 0x0a, 0x04, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x14, 0x0a, 0x10, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x55, + 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, + 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x45, 0x58, 0x54, 0x45, 0x4e, 0x53, 0x49, 0x4f, 0x4e, 0x53, 0x10, + 0x01, 0x12, 0x11, 0x0a, 0x0d, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x52, 0x45, 0x53, 0x45, 0x52, 0x56, + 0x45, 0x44, 0x10, 0x02, 0x42, 0x06, 0x0a, 0x04, 0x64, 0x65, 0x63, 0x6c, 0x22, 0xbf, 0x08, 0x0a, + 0x03, 0x44, 0x65, 0x66, 0x12, 0x2d, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x66, 0x2e, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x04, 0x6b, + 0x69, 0x6e, 0x64, 0x12, 0x29, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x29, + 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x54, + 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x3c, 0x0a, 0x09, 0x73, 0x69, 0x67, + 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, + 0x65, 0x66, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x09, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x12, 0x32, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, + 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, + 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x2e, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x63, 0x6c, 0x2e, 0x42, 0x6f, + 0x64, 0x79, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, + 0x70, 0x61, 0x6e, 0x12, 0x38, 0x0a, 0x0c, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x73, + 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, + 0x52, 0x0b, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x36, 
0x0a, + 0x0b, 0x65, 0x71, 0x75, 0x61, 0x6c, 0x73, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0c, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0a, 0x65, 0x71, 0x75, 0x61, 0x6c, + 0x73, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x3c, 0x0a, 0x0e, 0x73, 0x65, 0x6d, 0x69, 0x63, 0x6f, 0x6c, + 0x6f, 0x6e, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, + 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, + 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0d, 0x73, 0x65, 0x6d, 0x69, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, 0x53, + 0x70, 0x61, 0x6e, 0x1a, 0xbe, 0x02, 0x0a, 0x09, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x12, 0x2d, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, + 0x12, 0x2f, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x73, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x34, 0x0a, 0x0a, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x09, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x53, 0x70, + 0x61, 0x6e, 0x12, 0x38, 0x0a, 0x0c, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x73, 0x5f, 0x73, 0x70, + 0x61, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, + 0x0b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x73, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x36, 0x0a, 0x0b, + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0d, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x53, 0x70, 0x61, 0x6e, 0x22, 0xc7, 0x01, 0x0a, 0x04, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x14, 0x0a, + 0x10, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, + 0x44, 0x10, 0x00, 0x12, 0x10, 0x0a, 0x0c, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x4d, 0x45, 0x53, 0x53, + 0x41, 0x47, 0x45, 0x10, 0x01, 0x12, 0x0d, 0x0a, 0x09, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x45, 0x4e, + 0x55, 0x4d, 0x10, 0x02, 0x12, 0x10, 0x0a, 0x0c, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x53, 0x45, 0x52, + 0x56, 0x49, 0x43, 0x45, 0x10, 0x03, 0x12, 0x0f, 0x0a, 0x0b, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x45, + 0x58, 0x54, 0x45, 0x4e, 0x44, 0x10, 0x04, 0x12, 0x0e, 0x0a, 0x0a, 0x4b, 0x49, 0x4e, 0x44, 0x5f, + 0x46, 0x49, 0x45, 0x4c, 0x44, 0x10, 0x05, 0x12, 0x13, 0x0a, 0x0f, 0x4b, 0x49, 0x4e, 0x44, 0x5f, + 0x45, 0x4e, 0x55, 0x4d, 0x5f, 0x56, 0x41, 0x4c, 0x55, 0x45, 0x10, 0x06, 0x12, 0x0e, 0x0a, 0x0a, + 0x4b, 0x49, 0x4e, 
0x44, 0x5f, 0x4f, 0x4e, 0x45, 0x4f, 0x46, 0x10, 0x07, 0x12, 0x0e, 0x0a, 0x0a, + 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x10, 0x08, 0x12, 0x0f, 0x0a, 0x0b, + 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x4d, 0x45, 0x54, 0x48, 0x4f, 0x44, 0x10, 0x09, 0x12, 0x0f, 0x0a, + 0x0b, 0x4b, 0x49, 0x4e, 0x44, 0x5f, 0x4f, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x0a, 0x22, 0x88, + 0x02, 0x0a, 0x07, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x38, 0x0a, 0x07, 0x65, 0x6e, + 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x65, 0x6e, 0x74, + 0x72, 0x69, 0x65, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x1a, + 0x97, 0x01, 0x0a, 0x05, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x29, 0x0a, 0x04, 0x70, 0x61, 0x74, + 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, + 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x52, 0x04, + 0x70, 0x61, 0x74, 0x68, 0x12, 0x2b, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x12, 0x36, 0x0a, 0x0b, 0x65, 0x71, 0x75, 0x61, 0x6c, 0x73, 0x5f, 0x73, 0x70, 0x61, 0x6e, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0a, 0x65, + 0x71, 0x75, 0x61, 0x6c, 0x73, 0x53, 0x70, 0x61, 0x6e, 0x22, 0xf8, 0x0a, 0x0a, 0x04, 0x45, 0x78, + 0x70, 0x72, 0x12, 0x39, 0x0a, 0x07, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x48, 0x00, 0x52, 0x07, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x12, 0x2b, 0x0a, + 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, + 0x74, 0x68, 0x48, 0x00, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x3c, 0x0a, 0x08, 0x70, 0x72, + 0x65, 0x66, 0x69, 0x78, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, + 0x78, 0x70, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x65, 0x64, 0x48, 0x00, 0x52, 0x08, + 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x65, 0x64, 0x12, 0x33, 0x0a, 0x05, 0x72, 0x61, 0x6e, 0x67, + 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, + 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x2e, 0x52, + 0x61, 0x6e, 0x67, 0x65, 0x48, 0x00, 0x52, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x33, 0x0a, + 0x05, 0x61, 0x72, 0x72, 0x61, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 
0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, + 0x78, 0x70, 0x72, 0x2e, 0x41, 0x72, 0x72, 0x61, 0x79, 0x48, 0x00, 0x52, 0x05, 0x61, 0x72, 0x72, + 0x61, 0x79, 0x12, 0x30, 0x0a, 0x04, 0x64, 0x69, 0x63, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1a, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x2e, 0x44, 0x69, 0x63, 0x74, 0x48, 0x00, 0x52, 0x04, + 0x64, 0x69, 0x63, 0x74, 0x12, 0x2a, 0x0a, 0x02, 0x6b, 0x76, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x18, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x2e, 0x4b, 0x76, 0x48, 0x00, 0x52, 0x02, 0x6b, 0x76, + 0x1a, 0xa4, 0x01, 0x0a, 0x07, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x12, 0x1d, 0x0a, 0x09, + 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x48, + 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x0b, 0x66, + 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, + 0x48, 0x00, 0x52, 0x0a, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, + 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x42, 0x07, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x8b, 0x02, 0x0a, 0x08, 0x50, 0x72, 0x65, 0x66, + 0x69, 0x78, 0x65, 0x64, 0x12, 0x3d, 0x0a, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x66, + 0x69, 0x78, 0x65, 0x64, 0x2e, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x52, 0x06, 0x70, 0x72, 0x65, + 0x66, 0x69, 0x78, 0x12, 0x29, 0x0a, 0x04, 0x65, 0x78, 0x70, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x52, 0x04, 0x65, 0x78, 0x70, 0x72, 0x12, 0x29, + 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x36, 0x0a, 0x0b, 0x70, 0x72, 0x65, + 0x66, 0x69, 0x78, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, + 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0a, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x53, 0x70, 0x61, + 0x6e, 0x22, 0x32, 0x0a, 0x06, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, 0x16, 0x0a, 0x12, 0x50, + 0x52, 0x45, 0x46, 0x49, 0x58, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, + 0x44, 0x10, 0x00, 0x12, 0x10, 0x0a, 0x0c, 0x50, 0x52, 0x45, 0x46, 0x49, 0x58, 0x5f, 0x4d, 0x49, + 0x4e, 0x55, 0x53, 0x10, 0x01, 0x1a, 0xb8, 0x01, 0x0a, 0x05, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, + 0x2b, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 
0x01, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, + 0x2e, 0x45, 0x78, 0x70, 0x72, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x27, 0x0a, 0x03, + 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, + 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, + 0x12, 0x2e, 0x0a, 0x07, 0x74, 0x6f, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x06, 0x74, 0x6f, 0x53, 0x70, 0x61, 0x6e, + 0x1a, 0x65, 0x0a, 0x05, 0x41, 0x72, 0x72, 0x61, 0x79, 0x12, 0x31, 0x0a, 0x08, 0x65, 0x6c, 0x65, + 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, + 0x70, 0x72, 0x52, 0x08, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x29, 0x0a, 0x04, + 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, + 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x1a, 0x65, 0x0a, 0x04, 0x44, 0x69, 0x63, 0x74, 0x12, + 0x32, 0x0a, 0x07, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x18, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x2e, 0x4b, 0x76, 0x52, 0x07, 0x65, 0x6e, 0x74, 0x72, + 0x69, 0x65, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x1a, 0xbb, + 0x01, 0x0a, 0x02, 0x4b, 0x76, 0x12, 0x27, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x72, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2b, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, + 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, + 0x45, 0x78, 0x70, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x29, 0x0a, 0x04, 0x73, + 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, + 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x34, 0x0a, 0x0a, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, 0x5f, + 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, + 0x6e, 0x52, 0x09, 0x63, 0x6f, 0x6c, 0x6f, 0x6e, 0x53, 0x70, 0x61, 0x6e, 0x42, 0x06, 0x0a, 0x04, + 0x65, 0x78, 0x70, 0x72, 0x22, 0xc9, 0x05, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x2b, 
0x0a, + 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, + 0x74, 0x68, 0x48, 0x00, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x3c, 0x0a, 0x08, 0x70, 0x72, + 0x65, 0x66, 0x69, 0x78, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x54, + 0x79, 0x70, 0x65, 0x2e, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x65, 0x64, 0x48, 0x00, 0x52, 0x08, + 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x65, 0x64, 0x12, 0x39, 0x0a, 0x07, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x69, 0x63, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x62, 0x75, 0x66, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, + 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x48, 0x00, 0x52, 0x07, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x69, 0x63, 0x1a, 0xcb, 0x02, 0x0a, 0x08, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x65, 0x64, + 0x12, 0x3d, 0x0a, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x25, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x65, 0x64, + 0x2e, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x52, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, + 0x29, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, + 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, + 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, + 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, + 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x36, 0x0a, 0x0b, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x5f, + 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, + 0x6e, 0x52, 0x0a, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x53, 0x70, 0x61, 0x6e, 0x22, 0x72, 0x0a, + 0x06, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, 0x16, 0x0a, 0x12, 0x50, 0x52, 0x45, 0x46, 0x49, + 0x58, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, + 0x13, 0x0a, 0x0f, 0x50, 0x52, 0x45, 0x46, 0x49, 0x58, 0x5f, 0x4f, 0x50, 0x54, 0x49, 0x4f, 0x4e, + 0x41, 0x4c, 0x10, 0x01, 0x12, 0x13, 0x0a, 0x0f, 0x50, 0x52, 0x45, 0x46, 0x49, 0x58, 0x5f, 0x52, + 0x45, 0x50, 0x45, 0x41, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x13, 0x0a, 0x0f, 0x50, 0x52, 0x45, + 0x46, 0x49, 0x58, 0x5f, 0x52, 0x45, 0x51, 0x55, 0x49, 0x52, 0x45, 0x44, 0x10, 0x03, 0x12, 0x11, + 0x0a, 0x0d, 0x50, 0x52, 0x45, 0x46, 0x49, 0x58, 0x5f, 0x53, 0x54, 0x52, 0x45, 0x41, 0x4d, 0x10, + 0x04, 0x1a, 0xc4, 0x01, 0x0a, 0x07, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x12, 0x29, 0x0a, + 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, + 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, + 0x74, 0x68, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x29, 0x0a, 0x04, 0x61, 0x72, 0x67, 0x73, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 
0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x61, + 0x72, 0x67, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x38, + 0x0a, 0x0c, 0x62, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0b, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0b, 0x62, 0x72, 0x61, + 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x42, 0xc7, 0x01, 0x0a, 0x13, 0x63, 0x6f, 0x6d, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x42, 0x08, 0x41, 0x73, 0x74, 0x50, 0x72, 0x6f, + 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x48, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x62, 0x75, 0x66, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, + 0x67, 0x65, 0x6e, 0x2f, 0x62, 0x75, 0x66, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x2f, 0x76, 0x31, 0x3b, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x76, 0x31, 0xa2, 0x02, + 0x03, 0x42, 0x43, 0x58, 0xaa, 0x02, 0x0f, 0x42, 0x75, 0x66, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, 0x42, 0x75, 0x66, 0x5c, 0x43, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x1b, 0x42, 0x75, 0x66, 0x5c, 0x43, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x11, 0x42, 0x75, 0x66, 0x3a, 0x3a, 0x43, 0x6f, + 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, +} + +var ( + file_buf_compiler_v1_ast_proto_rawDescOnce sync.Once + file_buf_compiler_v1_ast_proto_rawDescData = file_buf_compiler_v1_ast_proto_rawDesc +) + +func file_buf_compiler_v1_ast_proto_rawDescGZIP() []byte { + file_buf_compiler_v1_ast_proto_rawDescOnce.Do(func() { + file_buf_compiler_v1_ast_proto_rawDescData = protoimpl.X.CompressGZIP(file_buf_compiler_v1_ast_proto_rawDescData) + }) + return file_buf_compiler_v1_ast_proto_rawDescData +} + +var file_buf_compiler_v1_ast_proto_enumTypes = make([]protoimpl.EnumInfo, 7) +var file_buf_compiler_v1_ast_proto_msgTypes = make([]protoimpl.MessageInfo, 25) +var file_buf_compiler_v1_ast_proto_goTypes = []any{ + (Path_Component_Separator)(0), // 0: buf.compiler.v1.Path.Component.Separator + (Decl_Syntax_Kind)(0), // 1: buf.compiler.v1.Decl.Syntax.Kind + (Decl_Import_Modifier)(0), // 2: buf.compiler.v1.Decl.Import.Modifier + (Decl_Range_Kind)(0), // 3: buf.compiler.v1.Decl.Range.Kind + (Def_Kind)(0), // 4: buf.compiler.v1.Def.Kind + (Expr_Prefixed_Prefix)(0), // 5: buf.compiler.v1.Expr.Prefixed.Prefix + (Type_Prefixed_Prefix)(0), // 6: buf.compiler.v1.Type.Prefixed.Prefix + (*File)(nil), // 7: buf.compiler.v1.File + (*Span)(nil), // 8: buf.compiler.v1.Span + (*Path)(nil), // 9: buf.compiler.v1.Path + (*Decl)(nil), // 10: buf.compiler.v1.Decl + (*Def)(nil), // 11: buf.compiler.v1.Def + (*Options)(nil), // 12: buf.compiler.v1.Options + (*Expr)(nil), // 13: buf.compiler.v1.Expr + (*Type)(nil), // 14: buf.compiler.v1.Type + 
(*Path_Component)(nil), // 15: buf.compiler.v1.Path.Component + (*Decl_Empty)(nil), // 16: buf.compiler.v1.Decl.Empty + (*Decl_Syntax)(nil), // 17: buf.compiler.v1.Decl.Syntax + (*Decl_Package)(nil), // 18: buf.compiler.v1.Decl.Package + (*Decl_Import)(nil), // 19: buf.compiler.v1.Decl.Import + (*Decl_Body)(nil), // 20: buf.compiler.v1.Decl.Body + (*Decl_Range)(nil), // 21: buf.compiler.v1.Decl.Range + (*Def_Signature)(nil), // 22: buf.compiler.v1.Def.Signature + (*Options_Entry)(nil), // 23: buf.compiler.v1.Options.Entry + (*Expr_Literal)(nil), // 24: buf.compiler.v1.Expr.Literal + (*Expr_Prefixed)(nil), // 25: buf.compiler.v1.Expr.Prefixed + (*Expr_Range)(nil), // 26: buf.compiler.v1.Expr.Range + (*Expr_Array)(nil), // 27: buf.compiler.v1.Expr.Array + (*Expr_Dict)(nil), // 28: buf.compiler.v1.Expr.Dict + (*Expr_Kv)(nil), // 29: buf.compiler.v1.Expr.Kv + (*Type_Prefixed)(nil), // 30: buf.compiler.v1.Type.Prefixed + (*Type_Generic)(nil), // 31: buf.compiler.v1.Type.Generic + (*Report_File)(nil), // 32: buf.compiler.v1.Report.File +} +var file_buf_compiler_v1_ast_proto_depIdxs = []int32{ + 32, // 0: buf.compiler.v1.File.file:type_name -> buf.compiler.v1.Report.File + 10, // 1: buf.compiler.v1.File.decls:type_name -> buf.compiler.v1.Decl + 15, // 2: buf.compiler.v1.Path.components:type_name -> buf.compiler.v1.Path.Component + 8, // 3: buf.compiler.v1.Path.span:type_name -> buf.compiler.v1.Span + 16, // 4: buf.compiler.v1.Decl.empty:type_name -> buf.compiler.v1.Decl.Empty + 17, // 5: buf.compiler.v1.Decl.syntax:type_name -> buf.compiler.v1.Decl.Syntax + 19, // 6: buf.compiler.v1.Decl.import:type_name -> buf.compiler.v1.Decl.Import + 18, // 7: buf.compiler.v1.Decl.package:type_name -> buf.compiler.v1.Decl.Package + 11, // 8: buf.compiler.v1.Decl.def:type_name -> buf.compiler.v1.Def + 20, // 9: buf.compiler.v1.Decl.body:type_name -> buf.compiler.v1.Decl.Body + 21, // 10: buf.compiler.v1.Decl.range:type_name -> buf.compiler.v1.Decl.Range + 4, // 11: buf.compiler.v1.Def.kind:type_name -> buf.compiler.v1.Def.Kind + 9, // 12: buf.compiler.v1.Def.name:type_name -> buf.compiler.v1.Path + 14, // 13: buf.compiler.v1.Def.type:type_name -> buf.compiler.v1.Type + 22, // 14: buf.compiler.v1.Def.signature:type_name -> buf.compiler.v1.Def.Signature + 13, // 15: buf.compiler.v1.Def.value:type_name -> buf.compiler.v1.Expr + 12, // 16: buf.compiler.v1.Def.options:type_name -> buf.compiler.v1.Options + 20, // 17: buf.compiler.v1.Def.body:type_name -> buf.compiler.v1.Decl.Body + 8, // 18: buf.compiler.v1.Def.span:type_name -> buf.compiler.v1.Span + 8, // 19: buf.compiler.v1.Def.keyword_span:type_name -> buf.compiler.v1.Span + 8, // 20: buf.compiler.v1.Def.equals_span:type_name -> buf.compiler.v1.Span + 8, // 21: buf.compiler.v1.Def.semicolon_span:type_name -> buf.compiler.v1.Span + 23, // 22: buf.compiler.v1.Options.entries:type_name -> buf.compiler.v1.Options.Entry + 8, // 23: buf.compiler.v1.Options.span:type_name -> buf.compiler.v1.Span + 24, // 24: buf.compiler.v1.Expr.literal:type_name -> buf.compiler.v1.Expr.Literal + 9, // 25: buf.compiler.v1.Expr.path:type_name -> buf.compiler.v1.Path + 25, // 26: buf.compiler.v1.Expr.prefixed:type_name -> buf.compiler.v1.Expr.Prefixed + 26, // 27: buf.compiler.v1.Expr.range:type_name -> buf.compiler.v1.Expr.Range + 27, // 28: buf.compiler.v1.Expr.array:type_name -> buf.compiler.v1.Expr.Array + 28, // 29: buf.compiler.v1.Expr.dict:type_name -> buf.compiler.v1.Expr.Dict + 29, // 30: buf.compiler.v1.Expr.kv:type_name -> buf.compiler.v1.Expr.Kv + 9, // 31: 
buf.compiler.v1.Type.path:type_name -> buf.compiler.v1.Path + 30, // 32: buf.compiler.v1.Type.prefixed:type_name -> buf.compiler.v1.Type.Prefixed + 31, // 33: buf.compiler.v1.Type.generic:type_name -> buf.compiler.v1.Type.Generic + 9, // 34: buf.compiler.v1.Path.Component.extension:type_name -> buf.compiler.v1.Path + 0, // 35: buf.compiler.v1.Path.Component.separator:type_name -> buf.compiler.v1.Path.Component.Separator + 8, // 36: buf.compiler.v1.Path.Component.component_span:type_name -> buf.compiler.v1.Span + 8, // 37: buf.compiler.v1.Path.Component.separator_span:type_name -> buf.compiler.v1.Span + 8, // 38: buf.compiler.v1.Decl.Empty.span:type_name -> buf.compiler.v1.Span + 1, // 39: buf.compiler.v1.Decl.Syntax.kind:type_name -> buf.compiler.v1.Decl.Syntax.Kind + 13, // 40: buf.compiler.v1.Decl.Syntax.value:type_name -> buf.compiler.v1.Expr + 12, // 41: buf.compiler.v1.Decl.Syntax.options:type_name -> buf.compiler.v1.Options + 8, // 42: buf.compiler.v1.Decl.Syntax.span:type_name -> buf.compiler.v1.Span + 8, // 43: buf.compiler.v1.Decl.Syntax.keyword_span:type_name -> buf.compiler.v1.Span + 8, // 44: buf.compiler.v1.Decl.Syntax.equals_span:type_name -> buf.compiler.v1.Span + 8, // 45: buf.compiler.v1.Decl.Syntax.semicolon_span:type_name -> buf.compiler.v1.Span + 9, // 46: buf.compiler.v1.Decl.Package.path:type_name -> buf.compiler.v1.Path + 12, // 47: buf.compiler.v1.Decl.Package.options:type_name -> buf.compiler.v1.Options + 8, // 48: buf.compiler.v1.Decl.Package.span:type_name -> buf.compiler.v1.Span + 8, // 49: buf.compiler.v1.Decl.Package.keyword_span:type_name -> buf.compiler.v1.Span + 8, // 50: buf.compiler.v1.Decl.Package.semicolon_span:type_name -> buf.compiler.v1.Span + 2, // 51: buf.compiler.v1.Decl.Import.modifier:type_name -> buf.compiler.v1.Decl.Import.Modifier + 13, // 52: buf.compiler.v1.Decl.Import.import_path:type_name -> buf.compiler.v1.Expr + 12, // 53: buf.compiler.v1.Decl.Import.options:type_name -> buf.compiler.v1.Options + 8, // 54: buf.compiler.v1.Decl.Import.span:type_name -> buf.compiler.v1.Span + 8, // 55: buf.compiler.v1.Decl.Import.keyword_span:type_name -> buf.compiler.v1.Span + 8, // 56: buf.compiler.v1.Decl.Import.modifier_span:type_name -> buf.compiler.v1.Span + 8, // 57: buf.compiler.v1.Decl.Import.import_path_span:type_name -> buf.compiler.v1.Span + 8, // 58: buf.compiler.v1.Decl.Import.semicolon_span:type_name -> buf.compiler.v1.Span + 10, // 59: buf.compiler.v1.Decl.Body.decls:type_name -> buf.compiler.v1.Decl + 8, // 60: buf.compiler.v1.Decl.Body.span:type_name -> buf.compiler.v1.Span + 3, // 61: buf.compiler.v1.Decl.Range.kind:type_name -> buf.compiler.v1.Decl.Range.Kind + 13, // 62: buf.compiler.v1.Decl.Range.ranges:type_name -> buf.compiler.v1.Expr + 12, // 63: buf.compiler.v1.Decl.Range.options:type_name -> buf.compiler.v1.Options + 8, // 64: buf.compiler.v1.Decl.Range.span:type_name -> buf.compiler.v1.Span + 8, // 65: buf.compiler.v1.Decl.Range.keyword_span:type_name -> buf.compiler.v1.Span + 8, // 66: buf.compiler.v1.Decl.Range.semicolon_span:type_name -> buf.compiler.v1.Span + 14, // 67: buf.compiler.v1.Def.Signature.inputs:type_name -> buf.compiler.v1.Type + 14, // 68: buf.compiler.v1.Def.Signature.outputs:type_name -> buf.compiler.v1.Type + 8, // 69: buf.compiler.v1.Def.Signature.span:type_name -> buf.compiler.v1.Span + 8, // 70: buf.compiler.v1.Def.Signature.input_span:type_name -> buf.compiler.v1.Span + 8, // 71: buf.compiler.v1.Def.Signature.returns_span:type_name -> buf.compiler.v1.Span + 8, // 72: 
buf.compiler.v1.Def.Signature.output_span:type_name -> buf.compiler.v1.Span + 9, // 73: buf.compiler.v1.Options.Entry.path:type_name -> buf.compiler.v1.Path + 13, // 74: buf.compiler.v1.Options.Entry.value:type_name -> buf.compiler.v1.Expr + 8, // 75: buf.compiler.v1.Options.Entry.equals_span:type_name -> buf.compiler.v1.Span + 8, // 76: buf.compiler.v1.Expr.Literal.span:type_name -> buf.compiler.v1.Span + 5, // 77: buf.compiler.v1.Expr.Prefixed.prefix:type_name -> buf.compiler.v1.Expr.Prefixed.Prefix + 13, // 78: buf.compiler.v1.Expr.Prefixed.expr:type_name -> buf.compiler.v1.Expr + 8, // 79: buf.compiler.v1.Expr.Prefixed.span:type_name -> buf.compiler.v1.Span + 8, // 80: buf.compiler.v1.Expr.Prefixed.prefix_span:type_name -> buf.compiler.v1.Span + 13, // 81: buf.compiler.v1.Expr.Range.start:type_name -> buf.compiler.v1.Expr + 13, // 82: buf.compiler.v1.Expr.Range.end:type_name -> buf.compiler.v1.Expr + 8, // 83: buf.compiler.v1.Expr.Range.span:type_name -> buf.compiler.v1.Span + 8, // 84: buf.compiler.v1.Expr.Range.to_span:type_name -> buf.compiler.v1.Span + 13, // 85: buf.compiler.v1.Expr.Array.elements:type_name -> buf.compiler.v1.Expr + 8, // 86: buf.compiler.v1.Expr.Array.span:type_name -> buf.compiler.v1.Span + 29, // 87: buf.compiler.v1.Expr.Dict.entries:type_name -> buf.compiler.v1.Expr.Kv + 8, // 88: buf.compiler.v1.Expr.Dict.span:type_name -> buf.compiler.v1.Span + 13, // 89: buf.compiler.v1.Expr.Kv.key:type_name -> buf.compiler.v1.Expr + 13, // 90: buf.compiler.v1.Expr.Kv.value:type_name -> buf.compiler.v1.Expr + 8, // 91: buf.compiler.v1.Expr.Kv.span:type_name -> buf.compiler.v1.Span + 8, // 92: buf.compiler.v1.Expr.Kv.colon_span:type_name -> buf.compiler.v1.Span + 6, // 93: buf.compiler.v1.Type.Prefixed.prefix:type_name -> buf.compiler.v1.Type.Prefixed.Prefix + 14, // 94: buf.compiler.v1.Type.Prefixed.type:type_name -> buf.compiler.v1.Type + 8, // 95: buf.compiler.v1.Type.Prefixed.span:type_name -> buf.compiler.v1.Span + 8, // 96: buf.compiler.v1.Type.Prefixed.prefix_span:type_name -> buf.compiler.v1.Span + 9, // 97: buf.compiler.v1.Type.Generic.path:type_name -> buf.compiler.v1.Path + 14, // 98: buf.compiler.v1.Type.Generic.args:type_name -> buf.compiler.v1.Type + 8, // 99: buf.compiler.v1.Type.Generic.span:type_name -> buf.compiler.v1.Span + 8, // 100: buf.compiler.v1.Type.Generic.bracket_span:type_name -> buf.compiler.v1.Span + 101, // [101:101] is the sub-list for method output_type + 101, // [101:101] is the sub-list for method input_type + 101, // [101:101] is the sub-list for extension type_name + 101, // [101:101] is the sub-list for extension extendee + 0, // [0:101] is the sub-list for field type_name +} + +func init() { file_buf_compiler_v1_ast_proto_init() } +func file_buf_compiler_v1_ast_proto_init() { + if File_buf_compiler_v1_ast_proto != nil { + return + } + file_buf_compiler_v1_report_proto_init() + if !protoimpl.UnsafeEnabled { + file_buf_compiler_v1_ast_proto_msgTypes[0].Exporter = func(v any, i int) any { + switch v := v.(*File); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[1].Exporter = func(v any, i int) any { + switch v := v.(*Span); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[2].Exporter = func(v any, i int) any { + switch v := v.(*Path); i { + case 0: + return &v.state + case 1: + return 
&v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[3].Exporter = func(v any, i int) any { + switch v := v.(*Decl); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[4].Exporter = func(v any, i int) any { + switch v := v.(*Def); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[5].Exporter = func(v any, i int) any { + switch v := v.(*Options); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[6].Exporter = func(v any, i int) any { + switch v := v.(*Expr); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[7].Exporter = func(v any, i int) any { + switch v := v.(*Type); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[8].Exporter = func(v any, i int) any { + switch v := v.(*Path_Component); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[9].Exporter = func(v any, i int) any { + switch v := v.(*Decl_Empty); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[10].Exporter = func(v any, i int) any { + switch v := v.(*Decl_Syntax); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[11].Exporter = func(v any, i int) any { + switch v := v.(*Decl_Package); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[12].Exporter = func(v any, i int) any { + switch v := v.(*Decl_Import); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[13].Exporter = func(v any, i int) any { + switch v := v.(*Decl_Body); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[14].Exporter = func(v any, i int) any { + switch v := v.(*Decl_Range); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[15].Exporter = func(v any, i int) any { + switch v := v.(*Def_Signature); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[16].Exporter = func(v any, i int) any { + switch v := v.(*Options_Entry); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[17].Exporter = func(v 
any, i int) any { + switch v := v.(*Expr_Literal); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[18].Exporter = func(v any, i int) any { + switch v := v.(*Expr_Prefixed); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[19].Exporter = func(v any, i int) any { + switch v := v.(*Expr_Range); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[20].Exporter = func(v any, i int) any { + switch v := v.(*Expr_Array); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[21].Exporter = func(v any, i int) any { + switch v := v.(*Expr_Dict); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[22].Exporter = func(v any, i int) any { + switch v := v.(*Expr_Kv); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[23].Exporter = func(v any, i int) any { + switch v := v.(*Type_Prefixed); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_ast_proto_msgTypes[24].Exporter = func(v any, i int) any { + switch v := v.(*Type_Generic); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_buf_compiler_v1_ast_proto_msgTypes[3].OneofWrappers = []any{ + (*Decl_Empty_)(nil), + (*Decl_Syntax_)(nil), + (*Decl_Import_)(nil), + (*Decl_Package_)(nil), + (*Decl_Def)(nil), + (*Decl_Body_)(nil), + (*Decl_Range_)(nil), + } + file_buf_compiler_v1_ast_proto_msgTypes[6].OneofWrappers = []any{ + (*Expr_Literal_)(nil), + (*Expr_Path)(nil), + (*Expr_Prefixed_)(nil), + (*Expr_Range_)(nil), + (*Expr_Array_)(nil), + (*Expr_Dict_)(nil), + (*Expr_Kv_)(nil), + } + file_buf_compiler_v1_ast_proto_msgTypes[7].OneofWrappers = []any{ + (*Type_Path)(nil), + (*Type_Prefixed_)(nil), + (*Type_Generic_)(nil), + } + file_buf_compiler_v1_ast_proto_msgTypes[8].OneofWrappers = []any{ + (*Path_Component_Ident)(nil), + (*Path_Component_Extension)(nil), + } + file_buf_compiler_v1_ast_proto_msgTypes[17].OneofWrappers = []any{ + (*Expr_Literal_IntValue)(nil), + (*Expr_Literal_FloatValue)(nil), + (*Expr_Literal_StringValue)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_buf_compiler_v1_ast_proto_rawDesc, + NumEnums: 7, + NumMessages: 25, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_buf_compiler_v1_ast_proto_goTypes, + DependencyIndexes: file_buf_compiler_v1_ast_proto_depIdxs, + EnumInfos: file_buf_compiler_v1_ast_proto_enumTypes, + MessageInfos: file_buf_compiler_v1_ast_proto_msgTypes, + }.Build() + File_buf_compiler_v1_ast_proto = out.File + file_buf_compiler_v1_ast_proto_rawDesc = nil + file_buf_compiler_v1_ast_proto_goTypes = nil + file_buf_compiler_v1_ast_proto_depIdxs = nil +} diff --git 
a/internal/gen/buf/compiler/v1/report.pb.go b/internal/gen/buf/compiler/v1/report.pb.go new file mode 100644 index 00000000..dc2ebc79 --- /dev/null +++ b/internal/gen/buf/compiler/v1/report.pb.go @@ -0,0 +1,569 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.34.2 +// protoc (unknown) +// source: buf/compiler/v1/report.proto + +package compilerv1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// A diagnostic level. This affects how (and whether) it is shown to users. +type Diagnostic_Level int32 + +const ( + Diagnostic_LEVEL_UNSPECIFIED Diagnostic_Level = 0 + Diagnostic_LEVEL_ERROR Diagnostic_Level = 1 + Diagnostic_LEVEL_WARNING Diagnostic_Level = 2 + Diagnostic_LEVEL_REMARK Diagnostic_Level = 3 +) + +// Enum value maps for Diagnostic_Level. +var ( + Diagnostic_Level_name = map[int32]string{ + 0: "LEVEL_UNSPECIFIED", + 1: "LEVEL_ERROR", + 2: "LEVEL_WARNING", + 3: "LEVEL_REMARK", + } + Diagnostic_Level_value = map[string]int32{ + "LEVEL_UNSPECIFIED": 0, + "LEVEL_ERROR": 1, + "LEVEL_WARNING": 2, + "LEVEL_REMARK": 3, + } +) + +func (x Diagnostic_Level) Enum() *Diagnostic_Level { + p := new(Diagnostic_Level) + *p = x + return p +} + +func (x Diagnostic_Level) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Diagnostic_Level) Descriptor() protoreflect.EnumDescriptor { + return file_buf_compiler_v1_report_proto_enumTypes[0].Descriptor() +} + +func (Diagnostic_Level) Type() protoreflect.EnumType { + return &file_buf_compiler_v1_report_proto_enumTypes[0] +} + +func (x Diagnostic_Level) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Diagnostic_Level.Descriptor instead. +func (Diagnostic_Level) EnumDescriptor() ([]byte, []int) { + return file_buf_compiler_v1_report_proto_rawDescGZIP(), []int{1, 0} +} + +// A diagnostic report, consisting of `Diagnostics` and the `File`s they diagnose. 
+type Report struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Files []*Report_File `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` + Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"` +} + +func (x *Report) Reset() { + *x = Report{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_report_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Report) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Report) ProtoMessage() {} + +func (x *Report) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_report_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Report.ProtoReflect.Descriptor instead. +func (*Report) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_report_proto_rawDescGZIP(), []int{0} +} + +func (x *Report) GetFiles() []*Report_File { + if x != nil { + return x.Files + } + return nil +} + +func (x *Report) GetDiagnostics() []*Diagnostic { + if x != nil { + return x.Diagnostics + } + return nil +} + +// A diagnostic within a `Report`. +type Diagnostic struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Required. The message to show for this diagnostic. This should fit on one line. + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + // Required. The level for this diagnostic. + Level Diagnostic_Level `protobuf:"varint,2,opt,name=level,proto3,enum=buf.compiler.v1.Diagnostic_Level" json:"level,omitempty"` + // An optional path to show in the diagnostic, if it has no annotations. + // This is useful for e.g. diagnostics that would have no spans. + InFile string `protobuf:"bytes,3,opt,name=in_file,json=inFile,proto3" json:"in_file,omitempty"` + // Annotations for source code relevant to this diagnostic. + Annotations []*Diagnostic_Annotation `protobuf:"bytes,4,rep,name=annotations,proto3" json:"annotations,omitempty"` + // Notes about the error to show to the user. May span multiple lines. + Notes []string `protobuf:"bytes,5,rep,name=notes,proto3" json:"notes,omitempty"` + // Helpful suggestions to the user. + Help []string `protobuf:"bytes,6,rep,name=help,proto3" json:"help,omitempty"` + // Debugging information related to the diagnostic. This should only be + // used for information about debugging a tool or compiler that emits the + // diagnostic, not the code being diagnosed. 
+ Debug []string `protobuf:"bytes,7,rep,name=debug,proto3" json:"debug,omitempty"` +} + +func (x *Diagnostic) Reset() { + *x = Diagnostic{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_report_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Diagnostic) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Diagnostic) ProtoMessage() {} + +func (x *Diagnostic) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_report_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Diagnostic.ProtoReflect.Descriptor instead. +func (*Diagnostic) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_report_proto_rawDescGZIP(), []int{1} +} + +func (x *Diagnostic) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *Diagnostic) GetLevel() Diagnostic_Level { + if x != nil { + return x.Level + } + return Diagnostic_LEVEL_UNSPECIFIED +} + +func (x *Diagnostic) GetInFile() string { + if x != nil { + return x.InFile + } + return "" +} + +func (x *Diagnostic) GetAnnotations() []*Diagnostic_Annotation { + if x != nil { + return x.Annotations + } + return nil +} + +func (x *Diagnostic) GetNotes() []string { + if x != nil { + return x.Notes + } + return nil +} + +func (x *Diagnostic) GetHelp() []string { + if x != nil { + return x.Help + } + return nil +} + +func (x *Diagnostic) GetDebug() []string { + if x != nil { + return x.Debug + } + return nil +} + +// A file involved in a diagnostic `Report`. +type Report_File struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The path to this file. Does not need to be meaningful as a file-system path. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // The textual contents of this file. + Text []byte `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` +} + +func (x *Report_File) Reset() { + *x = Report_File{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_report_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Report_File) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Report_File) ProtoMessage() {} + +func (x *Report_File) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_report_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Report_File.ProtoReflect.Descriptor instead. +func (*Report_File) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_report_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *Report_File) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *Report_File) GetText() []byte { + if x != nil { + return x.Text + } + return nil +} + +// A file annotation within a `Diagnostic`. This corresponds to a single +// span of source code in a `Report`'s file. +type Diagnostic_Annotation struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A message to show under this snippet. 
May be empty. + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + // Whether this is a "primary" snippet, which is used for deciding whether or not + // to mark the snippet with the same color as the overall diagnostic. + Primary bool `protobuf:"varint,2,opt,name=primary,proto3" json:"primary,omitempty"` + // The index of `Report.files` of the file this annotation is for. + // + // This is not a whole `Report.File` to help keep serialized reports slim. This + // avoids neeidng to duplicate the whole text of the file one for every annotation. + File uint32 `protobuf:"varint,3,opt,name=file,proto3" json:"file,omitempty"` + // The start offset of the annotated snippet, in bytes. + Start uint32 `protobuf:"varint,4,opt,name=start,proto3" json:"start,omitempty"` + // The end offset of the annotated snippet, in bytes. + End uint32 `protobuf:"varint,5,opt,name=end,proto3" json:"end,omitempty"` +} + +func (x *Diagnostic_Annotation) Reset() { + *x = Diagnostic_Annotation{} + if protoimpl.UnsafeEnabled { + mi := &file_buf_compiler_v1_report_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Diagnostic_Annotation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Diagnostic_Annotation) ProtoMessage() {} + +func (x *Diagnostic_Annotation) ProtoReflect() protoreflect.Message { + mi := &file_buf_compiler_v1_report_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Diagnostic_Annotation.ProtoReflect.Descriptor instead. +func (*Diagnostic_Annotation) Descriptor() ([]byte, []int) { + return file_buf_compiler_v1_report_proto_rawDescGZIP(), []int{1, 0} +} + +func (x *Diagnostic_Annotation) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *Diagnostic_Annotation) GetPrimary() bool { + if x != nil { + return x.Primary + } + return false +} + +func (x *Diagnostic_Annotation) GetFile() uint32 { + if x != nil { + return x.File + } + return 0 +} + +func (x *Diagnostic_Annotation) GetStart() uint32 { + if x != nil { + return x.Start + } + return 0 +} + +func (x *Diagnostic_Annotation) GetEnd() uint32 { + if x != nil { + return x.End + } + return 0 +} + +var File_buf_compiler_v1_report_proto protoreflect.FileDescriptor + +var file_buf_compiler_v1_report_proto_rawDesc = []byte{ + 0x0a, 0x1c, 0x62, 0x75, 0x66, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2f, 0x76, + 0x31, 0x2f, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0f, + 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x22, + 0xab, 0x01, 0x0a, 0x06, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x32, 0x0a, 0x05, 0x66, 0x69, + 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x62, 0x75, 0x66, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x70, 0x6f, + 0x72, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x3d, + 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, + 0x52, 0x0b, 0x64, 
0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x1a, 0x2e, 0x0a, + 0x04, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, + 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x22, 0xd6, 0x03, + 0x0a, 0x0a, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x12, 0x18, 0x0a, 0x07, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x37, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x21, 0x2e, 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, + 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, + 0x69, 0x63, 0x2e, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, + 0x17, 0x0a, 0x07, 0x69, 0x6e, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x69, 0x6e, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x48, 0x0a, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, + 0x62, 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x2e, + 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x6e, 0x6f, 0x74, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x05, 0x6e, 0x6f, 0x74, 0x65, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x65, 0x6c, 0x70, + 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x68, 0x65, 0x6c, 0x70, 0x12, 0x14, 0x0a, 0x05, + 0x64, 0x65, 0x62, 0x75, 0x67, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x64, 0x65, 0x62, + 0x75, 0x67, 0x1a, 0x7c, 0x0a, 0x0a, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, + 0x69, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x70, 0x72, 0x69, + 0x6d, 0x61, 0x72, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x04, 0x66, 0x69, 0x6c, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x10, + 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x65, 0x6e, 0x64, + 0x22, 0x54, 0x0a, 0x05, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x15, 0x0a, 0x11, 0x4c, 0x45, 0x56, + 0x45, 0x4c, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, + 0x12, 0x0f, 0x0a, 0x0b, 0x4c, 0x45, 0x56, 0x45, 0x4c, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, + 0x01, 0x12, 0x11, 0x0a, 0x0d, 0x4c, 0x45, 0x56, 0x45, 0x4c, 0x5f, 0x57, 0x41, 0x52, 0x4e, 0x49, + 0x4e, 0x47, 0x10, 0x02, 0x12, 0x10, 0x0a, 0x0c, 0x4c, 0x45, 0x56, 0x45, 0x4c, 0x5f, 0x52, 0x45, + 0x4d, 0x41, 0x52, 0x4b, 0x10, 0x03, 0x42, 0xca, 0x01, 0x0a, 0x13, 0x63, 0x6f, 0x6d, 0x2e, 0x62, + 0x75, 0x66, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x31, 0x42, 0x0b, + 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x48, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 
0x6f, 0x6d, 0x2f, 0x62, 0x75, 0x66, 0x62, 0x75, 0x69, + 0x6c, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x2f, + 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x62, 0x75, 0x66, + 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2f, 0x76, 0x31, 0x3b, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x42, 0x43, 0x58, 0xaa, 0x02, 0x0f, + 0x42, 0x75, 0x66, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x2e, 0x56, 0x31, 0xca, + 0x02, 0x0f, 0x42, 0x75, 0x66, 0x5c, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x5c, 0x56, + 0x31, 0xe2, 0x02, 0x1b, 0x42, 0x75, 0x66, 0x5c, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, + 0x02, 0x11, 0x42, 0x75, 0x66, 0x3a, 0x3a, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x3a, + 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_buf_compiler_v1_report_proto_rawDescOnce sync.Once + file_buf_compiler_v1_report_proto_rawDescData = file_buf_compiler_v1_report_proto_rawDesc +) + +func file_buf_compiler_v1_report_proto_rawDescGZIP() []byte { + file_buf_compiler_v1_report_proto_rawDescOnce.Do(func() { + file_buf_compiler_v1_report_proto_rawDescData = protoimpl.X.CompressGZIP(file_buf_compiler_v1_report_proto_rawDescData) + }) + return file_buf_compiler_v1_report_proto_rawDescData +} + +var file_buf_compiler_v1_report_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_buf_compiler_v1_report_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_buf_compiler_v1_report_proto_goTypes = []any{ + (Diagnostic_Level)(0), // 0: buf.compiler.v1.Diagnostic.Level + (*Report)(nil), // 1: buf.compiler.v1.Report + (*Diagnostic)(nil), // 2: buf.compiler.v1.Diagnostic + (*Report_File)(nil), // 3: buf.compiler.v1.Report.File + (*Diagnostic_Annotation)(nil), // 4: buf.compiler.v1.Diagnostic.Annotation +} +var file_buf_compiler_v1_report_proto_depIdxs = []int32{ + 3, // 0: buf.compiler.v1.Report.files:type_name -> buf.compiler.v1.Report.File + 2, // 1: buf.compiler.v1.Report.diagnostics:type_name -> buf.compiler.v1.Diagnostic + 0, // 2: buf.compiler.v1.Diagnostic.level:type_name -> buf.compiler.v1.Diagnostic.Level + 4, // 3: buf.compiler.v1.Diagnostic.annotations:type_name -> buf.compiler.v1.Diagnostic.Annotation + 4, // [4:4] is the sub-list for method output_type + 4, // [4:4] is the sub-list for method input_type + 4, // [4:4] is the sub-list for extension type_name + 4, // [4:4] is the sub-list for extension extendee + 0, // [0:4] is the sub-list for field type_name +} + +func init() { file_buf_compiler_v1_report_proto_init() } +func file_buf_compiler_v1_report_proto_init() { + if File_buf_compiler_v1_report_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_buf_compiler_v1_report_proto_msgTypes[0].Exporter = func(v any, i int) any { + switch v := v.(*Report); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_report_proto_msgTypes[1].Exporter = func(v any, i int) any { + switch v := v.(*Diagnostic); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_report_proto_msgTypes[2].Exporter = func(v any, i int) any { + switch v := v.(*Report_File); i { + case 0: + return &v.state + case 1: + return 
&v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_buf_compiler_v1_report_proto_msgTypes[3].Exporter = func(v any, i int) any { + switch v := v.(*Diagnostic_Annotation); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_buf_compiler_v1_report_proto_rawDesc, + NumEnums: 1, + NumMessages: 4, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_buf_compiler_v1_report_proto_goTypes, + DependencyIndexes: file_buf_compiler_v1_report_proto_depIdxs, + EnumInfos: file_buf_compiler_v1_report_proto_enumTypes, + MessageInfos: file_buf_compiler_v1_report_proto_msgTypes, + }.Build() + File_buf_compiler_v1_report_proto = out.File + file_buf_compiler_v1_report_proto_rawDesc = nil + file_buf_compiler_v1_report_proto_goTypes = nil + file_buf_compiler_v1_report_proto_depIdxs = nil +} diff --git a/internal/golden/golden.go b/internal/golden/golden.go new file mode 100644 index 00000000..bd880ff6 --- /dev/null +++ b/internal/golden/golden.go @@ -0,0 +1,253 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package golden provides a framework for writing file-based golden tests. +// +// The primary entry-point is [Corpus]. Define a new corpus in an ordinary Go +// test body and call [Corpus.Run] to execute it. +// +// Corpora can be "refreshed" automatically to update the golden test corpus +// with new data generated by the test instead of comparing it. To do this, +// run the test with the environment variable that [Corpus].Refresh names set +// to a file glob for all test files to regenerate expectations for. +package golden + +import ( + "errors" + "fmt" + "io/fs" + "os" + "path" + "path/filepath" + "runtime" + "runtime/debug" + "strings" + "testing" + + "github.com/bmatcuk/doublestar/v4" + "github.com/pmezard/go-difflib/difflib" +) + +// A corpus describes a test data corpus. This is essentially a way for doing table-driven +// tests where the "table" is in your file system. +type Corpus struct { + // The root of the test data directory. This path is relative to the directory of + // the file that calls [Corpus.Run]. + Root string + + // An environment variable to check with regards to whether to run in "refresh" + // mode or not. + Refresh string + + // The file extension (without a dot) of files which define a test case, + // e.g. "proto". + Extension string + + // Possible outputs of the test, which are found using Outputs.Extension. + // If the file for a particular output is missing, it is implicitly treated + // as being expected to be empty (i.e., if the file Output[n].Extension + // specifies does not exist, then Output[n].Compare is passed the empty string + // as the "want" value). + Outputs []Output +} + +// Run executes a golden test. 
+// +// The test function executes a single test case in the corpus, and writes the results to +// the entries of output, which will be the same length as Corpus.Outputs. +// +// test should write to outputs as early as possible to ensure that, if test panics, successfully +// created test output can still be shown to the user. +func (c Corpus) Run(t *testing.T, test func(t *testing.T, path, text string, outputs []string)) { + testDir := callerDir(0) + root := filepath.Join(testDir, c.Root) + t.Logf("corpora: searching for files in %q", root) + + // Enumerate the tests to run by walking the filesystem. + var tests []string + err := filepath.Walk(root, func(p string, fi fs.FileInfo, err error) error { + if err != nil { + return err + } + if !fi.IsDir() && strings.TrimPrefix(path.Ext(p), ".") == c.Extension { + tests = append(tests, p) + } + return err + }) + if err != nil { + t.Fatal("corpora: error while stating testdata FS:", err) + } + + // Check if a refresh has been requested. + var refresh string + if c.Refresh != "" { + refresh = os.Getenv(c.Refresh) + if !doublestar.ValidatePattern(refresh) { + t.Fatalf("invalid glob: ") + } + } + + if refresh != "" { + t.Logf("corpora: refreshing test data because %s=%s", c.Refresh, refresh) + t.Fail() + } + + // Execute the tests. + for _, path := range tests { + path := path // Avoid loop variable capture. + + name, _ := filepath.Rel(testDir, path) + t.Run(name, func(t *testing.T) { + t.Parallel() + + bytes, err := os.ReadFile(path) + if err != nil { + t.Fatalf("corpora: error while loading input file %q: %v", path, err) + } + + input := string(bytes) + results := make([]string, len(c.Outputs)) + + //nolint:revive,predeclared // it's fine to use panic as a name here. + panic, panicStack := catch(func() { test(t, name, input, results) }) + if panic != nil { + t.Logf("test panicked: %v\n%s", panic, panicStack) + t.Fail() + } + + // If we panic, continue to run the tests. This helps with observability + // by getting test results we managed to compute into a form the user can + // inspect. + + refresh, _ := doublestar.Match(refresh, name) + for i, output := range c.Outputs { + if panic != nil && results[i] == "" { + // If we panicked and the result is empty, this means there's a good + // chance this result was not written to, so we skip doing anything + // that would potentially be noisy. + continue + } + + path := fmt.Sprint(path, ".", output.Extension) + + if !refresh { + bytes, err := os.ReadFile(path) + + if err != nil && !errors.Is(err, os.ErrNotExist) { + t.Logf("corpora: error while loading output file %q: %v", path, err) + t.Fail() + continue + } + + cmp := output.Compare + if cmp == nil { + cmp = CompareAndDiff + } + if err := cmp(results[i], string(bytes)); err != "" { + t.Logf("output mismatch for %q:\n%s", path, err) + t.Fail() + continue + } + } else { + if results[i] == "" { + err := os.Remove(path) + if err != nil && !errors.Is(err, os.ErrNotExist) { + t.Logf("corpora: error while deleting output file %q: %v", path, err) + t.Fail() + } + } else { + err := os.WriteFile(path, []byte(results[i]), 0600) + if err != nil { + t.Logf("corpora: error while writing output file %q: %v", path, err) + t.Fail() + } + } + } + } + }) + } +} + +// Output represents the output of a test case. +type Output struct { + // The extension of the output. 
This is a suffix to the name of the + // testcase's main file; so if Corpus.Extension is "proto", and this is + // "stderr", for a test "foo.proto" the test runner will look for files + // named "foo.proto.stderr". + Extension string + + // The comparison function for this output. If nil, defaults to + // [CompareAndDiff]. + Compare CompareFunc +} + +// CompareFunc is a comparison function between strings, used in [Output]. +// +// Returns empty string if the strings match, otherwise returns an error message. +type CompareFunc func(got, want string) string + +// CompareAndDiff is a [CompareFunc] that returns a colorized diff of the two +// strings if they are not equal. +func CompareAndDiff(got, want string) string { + if got == want { + return "" + } + + diff, err := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ + A: difflib.SplitLines(want), + B: difflib.SplitLines(got), + FromFile: "want", + ToFile: "got", + Context: 2, + }) + if err != nil { + return err.Error() + } + + // Colorize the diff so it's easier to read. We're looking for lines that + // start or end with a - or a +. + lines := strings.Split(diff, "\n") + for i := range lines { + s := lines[i] + if strings.HasPrefix(s, "+") { + lines[i] = "\033[1;92m" + s + "\033[0m" + } else if strings.HasPrefix(s, "-") { + lines[i] = "\033[1;91m" + s + "\033[0m" + } + } + + return strings.Join(lines, "\n") +} + +func callerDir(skip int) string { + _, file, _, ok := runtime.Caller(skip + 2) + if !ok { + panic("corpora: could not determine test file's directory") + } + return filepath.Dir(file) +} + +// catch runs cb and places any panic it results in panic. +// +//nolint:revive,predeclared // it's fine to use panic as a name here. +func catch(cb func()) (panic any, stack []byte) { + defer func() { + panic = recover() + if panic != nil { + stack = debug.Stack() + } + }() + cb() + return +} diff --git a/proto/buf/compiler/v1/ast.proto b/proto/buf/compiler/v1/ast.proto new file mode 100644 index 00000000..dfdda703 --- /dev/null +++ b/proto/buf/compiler/v1/ast.proto @@ -0,0 +1,356 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package buf.compiler.v1; + +import "buf/compiler/v1/report.proto"; + +// NOTE: In this file, we number all Span-typed fields together, starting +// at tag 10. They must also be suffixed with `_span`. + +// A parsed AST file. This is the root file for the whole Protocompile AST. +message File { + // The original filesystem file this file was parsed from. + buf.compiler.v1.Report.File file = 1; + // Declararations in this file. + repeated Decl decls = 2; +} + +// A source code span for a specific `File`. +// +// This only contains byte offsets for the span; all other information +// (such as the line number) should be re-computed as needed. +message Span { + uint32 start = 1; + uint32 end = 2; +} + +// A path in a Protobuf file. 
+// +// Protobuf paths are quite complex: they are not just dot-separated squences +// of names: each component can itself be a Protobuf path, such as +// `foo.(bar.(baz)).zing`. Although deep nesting is not currently used in +// the language, this AST supports it. +message Path { + // A path component. + message Component { + enum Separator { + SEPARATOR_UNSPECIFIED = 0; + SEPARATOR_DOT = 1; + SEPARATOR_SLASH = 2; + } + + // May be missing altogehter, for invalid paths like `foo..bar`. + oneof component { + // A single identifier. + string ident = 1; + // A nested extension path. + Path extension = 2; + } + + // The type of separator this component had before it. + // If this is SEPARATOR_UNSPECIFIED, this is the first + // component, and the path is not absolute. + Separator separator = 3; + + // The span of the component's value. + Span component_span = 10; + // The span of this component's leading dot, if any. + Span separator_span = 11; + } + + repeated Component components = 1; + + // The span for the whole path. + Span span = 10; +} + +// A declaration in a Protobuf file. +message Decl { + // An empty declaration. + message Empty { + Span span = 10; + } + + // A language pragma, such as a syntax or edition declaration. + message Syntax { + enum Kind { + KIND_UNSPECIFIED = 0; + KIND_SYNTAX = 1; + KIND_EDITION = 2; + } + + Kind kind = 1; + Expr value = 2; + Options options = 3; + + Span span = 10; + Span keyword_span = 11; + Span equals_span = 12; + Span semicolon_span = 13; + } + + // A package declaration. + message Package { + Path path = 1; + Options options = 2; + + Span span = 10; + Span keyword_span = 11; + Span semicolon_span = 12; + } + + // An import declaration. + message Import { + enum Modifier { + MODIFIER_UNSPECIFIED = 0; + MODIFIER_WEAK = 1; + MODIFIER_PUBLIC = 2; + } + + Modifier modifier = 1; + Expr import_path = 2; + Options options = 3; + + Span span = 10; + Span keyword_span = 11; + Span modifier_span = 12; + Span import_path_span = 13; + Span semicolon_span = 14; + } + + // The body of a message, enum, or similar declaration, which + // itself contains declarations. + message Body { + repeated Decl decls = 1; + + Span span = 10; + } + + // An extensions or reserved range within a message. Both productions are + // extremely similar, so they share an AST node. + message Range { + enum Kind { + KIND_UNSPECIFIED = 0; + KIND_EXTENSIONS = 1; + KIND_RESERVED = 2; + } + + Kind kind = 1; + repeated Expr ranges = 2; + Options options = 3; + + Span span = 10; + Span keyword_span = 11; + Span semicolon_span = 12; + } + + oneof decl { + Empty empty = 1; + Syntax syntax = 2; + Import import = 3; + Package package = 4; + Def def = 5; + Body body = 6; + Range range = 7; + } +} + +// A definition is a particular kind of declaration that combines the syntactic +// elements of type definitions, fields, options, and service methods. +// +// This allows the parser to accept and represent many invalid but plausible productions. +message Def { + enum Kind { + KIND_UNSPECIFIED = 0; + KIND_MESSAGE = 1; + KIND_ENUM = 2; + KIND_SERVICE = 3; + KIND_EXTEND = 4; + KIND_FIELD = 5; + KIND_ENUM_VALUE = 6; + KIND_ONEOF = 7; + KIND_GROUP = 8; + KIND_METHOD = 9; + KIND_OPTION = 10; + } + + // Definitions without a clear kind may be marked as `KIND_UNSPECIFIED`. + Kind kind = 1; + Path name = 2; + + // The type for a `KIND_FIELD` definiion. + Type type = 3; + + // A method signature. This appears on `KIND_METHOD`, for example. 
+ message Signature { + repeated Type inputs = 1; + repeated Type outputs = 2; + + Span span = 10; + Span input_span = 11; + Span returns_span = 12; + Span output_span = 13; + } + Signature signature = 4; + + // This is the tag number of `KIND_FIELD` or `KIND_ENUM_VALUE, + // or the value of `KIND_OPTION`. + Expr value = 5; + + // This is options appearing in `[...]`, such as on `KIND_FIELD` + // or `KIND_GROUP`. This will NOT inclde options on a oneof, since + // those are reprepresented as `KIND_OPTION` `Def` in `body`. + Options options = 6; + + // This is a braced body at the end of the definition. + Decl.Body body = 7; + + Span span = 10; + Span keyword_span = 11; + Span equals_span = 12; + Span semicolon_span = 13; +} + +// Compact options after a declaration, in `[...]`. +message Options { + message Entry { + Path path = 1; + Expr value = 2; + + Span equals_span = 10; + } + repeated Entry entries = 1; + + Span span = 10; +} + +// An expression, such as the value of an option or the tag of a field. +message Expr { + // A literal value: a number or a string. + message Literal { + // None of these may be set, in the case of an integer with an invalid or + // out-of-range format. + oneof value { + uint64 int_value = 1; + double float_value = 2; + string string_value = 3; + } + + Span span = 10; + } + + // An expression with some kind of prefix, such as a minus sign. + message Prefixed { + enum Prefix { + PREFIX_UNSPECIFIED = 0; + PREFIX_MINUS = 1; + } + + Prefix prefix = 1; + Expr expr = 2; + + Span span = 10; + Span prefix_span = 11; + } + + // A range expression, i.e. something like `1 to 10`. The `1 to max` is not + // speicial syntax; `max` is realized as a path expression. + // + // Ranges are inclusive. + message Range { + Expr start = 1; + Expr end = 2; + + Span span = 10; + Span to_span = 11; + } + + // An array literal, a sequence of expressions bound by square brackets. + message Array { + repeated Expr elements = 1; + + Span span = 10; + } + + // A dictionary literal, a sequence of key-value pairs bound by curly braces. + message Dict { + repeated Expr.Kv entries = 1; + + Span span = 10; + } + + // A key-value pair expression, which usually will appear inside of an + // `Expr.Dict`. + message Kv { + Expr key = 1; + Expr value = 2; + + Span span = 10; + Span colon_span = 11; + } + + oneof expr { + Literal literal = 1; + Path path = 2; + Prefixed prefixed = 3; + Range range = 4; + Array array = 5; + Dict dict = 6; + Kv kv = 7; + } +} + +// A type, such as the prefix of a field. +// +// This AST includes many types not present in ordinary Protobuf, such as representations +// for `repeated repeated int32` and `Arbitrary`, among others. +message Type { + // A type with a modifier prefix in front of it, such as `repeated` or `stream`. + message Prefixed { + enum Prefix { + PREFIX_UNSPECIFIED = 0; + PREFIX_OPTIONAL = 1; + PREFIX_REPEATED = 2; + PREFIX_REQUIRED = 3; + PREFIX_STREAM = 4; + } + + Prefix prefix = 1; + Type type = 2; + + Span span = 10; + Span prefix_span = 11; + } + + // A type with generic arguments, such as `map`. + // + // Note that no other generic types are part of Protobuf, but we support arbitrary generic + // types since it is a more natural way to define the AST. 
+ message Generic { + Path path = 1; + repeated Type args = 2; + + Span span = 10; + Span bracket_span = 11; + } + + oneof type { + Path path = 1; + Prefixed prefixed = 2; + Generic generic = 3; + } +} diff --git a/proto/buf/compiler/v1/report.proto b/proto/buf/compiler/v1/report.proto new file mode 100644 index 00000000..9c8e5b73 --- /dev/null +++ b/proto/buf/compiler/v1/report.proto @@ -0,0 +1,88 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package buf.compiler.v1; + +// A diagnostic report, consisting of `Diagnostics` and the `File`s they diagnose. +message Report { + // A file involved in a diagnostic `Report`. + message File { + // The path to this file. Does not need to be meaningful as a file-system path. + string path = 1; + + // The textual contents of this file. + bytes text = 2; + } + + repeated File files = 1; + repeated Diagnostic diagnostics = 2; +} + +// A diagnostic within a `Report`. +message Diagnostic { + // A diagnostic level. This affects how (and whether) it is shown to users. + enum Level { + LEVEL_UNSPECIFIED = 0; + LEVEL_ERROR = 1; + LEVEL_WARNING = 2; + LEVEL_REMARK = 3; + } + + // A file annotation within a `Diagnostic`. This corresponds to a single + // span of source code in a `Report`'s file. + message Annotation { + // A message to show under this snippet. May be empty. + string message = 1; + + // Whether this is a "primary" snippet, which is used for deciding whether or not + // to mark the snippet with the same color as the overall diagnostic. + bool primary = 2; + + // The index of `Report.files` of the file this annotation is for. + // + // This is not a whole `Report.File` to help keep serialized reports slim. This + // avoids neeidng to duplicate the whole text of the file one for every annotation. + uint32 file = 3; + + // The start offset of the annotated snippet, in bytes. + uint32 start = 4; + // The end offset of the annotated snippet, in bytes. + uint32 end = 5; + } + + // Required. The message to show for this diagnostic. This should fit on one line. + string message = 1; + + // Required. The level for this diagnostic. + Level level = 2; + + // An optional path to show in the diagnostic, if it has no annotations. + // This is useful for e.g. diagnostics that would have no spans. + string in_file = 3; + + // Annotations for source code relevant to this diagnostic. + repeated Annotation annotations = 4; + + // Notes about the error to show to the user. May span multiple lines. + repeated string notes = 5; + // Helpful suggestions to the user. + repeated string help = 6; + + // Debugging information related to the diagnostic. This should only be + // used for information about debugging a tool or compiler that emits the + // diagnostic, not the code being diagnosed. + repeated string debug = 7; +}
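
Reviewer note: the sketch below shows how a test elsewhere in this module might drive the new internal/golden framework. The package name, corpus root, the PROTOCOMPILE_REFRESH variable name, and the output extensions are illustrative assumptions; only the Corpus and Output fields and the Run callback signature come from internal/golden/golden.go in this diff.

package parser_test

import (
	"testing"

	"github.com/bufbuild/protocompile/internal/golden"
)

func TestParse(t *testing.T) {
	corpus := golden.Corpus{
		// Root is resolved relative to the directory containing this test file.
		Root: "testdata",
		// Setting this env var to a glob (e.g. "**/*.proto") regenerates the
		// expectation files instead of comparing against them.
		Refresh: "PROTOCOMPILE_REFRESH",
		// Files ending in .proto define test cases...
		Extension: "proto",
		// ...and expectations live next to them as foo.proto.ast and foo.proto.stderr.
		Outputs: []golden.Output{
			{Extension: "ast"},
			{Extension: "stderr"},
		},
	}

	corpus.Run(t, func(t *testing.T, path, text string, outputs []string) {
		// A real test would parse text here and render the AST and any
		// diagnostics; this stand-in only demonstrates the contract: outputs
		// has one slot per Corpus.Outputs entry, and an empty string means
		// "expect the corresponding golden file to be absent or empty".
		outputs[0] = "parsed " + path + "\n"
		outputs[1] = ""
	})
}

Writing into outputs as results become available, rather than returning them, is what lets Run salvage partial results when the callback panics, per the comment on Corpus.Run.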
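
The report.proto messages are easiest to understand from the producing side. Below is a minimal, hypothetical sketch (the file name, offsets, and message text are made up) of building a Report with one error diagnostic using the generated buf/compiler/v1 types and printing it as JSON; it assumes code living inside this module, since the generated package is internal.

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protojson"

	compilerv1 "github.com/bufbuild/protocompile/internal/gen/buf/compiler/v1"
)

func main() {
	text := []byte(`syntax = "proto4";` + "\n")
	report := &compilerv1.Report{
		Files: []*compilerv1.Report_File{{
			Path: "example.proto",
			Text: text,
		}},
		Diagnostics: []*compilerv1.Diagnostic{{
			Message: `unrecognized syntax "proto4"`,
			Level:   compilerv1.Diagnostic_LEVEL_ERROR,
			Annotations: []*compilerv1.Diagnostic_Annotation{{
				Message: `expected "proto2" or "proto3"`,
				Primary: true,
				// File is an index into Report.files rather than a copy of the
				// file, and Start/End are byte offsets into that file's text;
				// [9, 17) is the span of the quoted string literal.
				File:  0,
				Start: 9,
				End:   17,
			}},
			Help: []string{`change this to syntax = "proto3";`},
		}},
	}

	out, err := protojson.Marshal(report)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}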
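
To make ast.proto's unified Def shape concrete, here is a hedged sketch of roughly how the declaration `int32 foo = 1;` could be represented with the generated types. Spans are omitted, the exact tree the parser emits may differ, and the identifier names are inferred from standard protoc-gen-go conventions for the messages above (the oneof wrapper types such as Decl_Def, Type_Path, Expr_Literal_, and Path_Component_Ident do appear in the generated code in this diff).

package main

import (
	"fmt"

	compilerv1 "github.com/bufbuild/protocompile/internal/gen/buf/compiler/v1"
)

// ident builds a single-component path such as `foo` or `int32`; builtin type
// names are ordinary paths in this AST.
func ident(name string) *compilerv1.Path {
	return &compilerv1.Path{
		Components: []*compilerv1.Path_Component{{
			Component: &compilerv1.Path_Component_Ident{Ident: name},
		}},
	}
}

func main() {
	// A field is just a Def of KIND_FIELD: its type and name are paths and its
	// tag number is an Expr literal. Message, enum, and service definitions
	// reuse the same Def message with a different Kind and a populated Body.
	field := &compilerv1.Decl{
		Decl: &compilerv1.Decl_Def{Def: &compilerv1.Def{
			Kind: compilerv1.Def_KIND_FIELD,
			Type: &compilerv1.Type{
				Type: &compilerv1.Type_Path{Path: ident("int32")},
			},
			Name: ident("foo"),
			Value: &compilerv1.Expr{
				Expr: &compilerv1.Expr_Literal_{Literal: &compilerv1.Expr_Literal{
					Value: &compilerv1.Expr_Literal_IntValue{IntValue: 1},
				}},
			},
		}},
	}
	fmt.Println(field)
}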