diff --git a/examples/helloworld/README.md b/examples/helloworld/README.md
index 924681fd..e3c2b345 100644
--- a/examples/helloworld/README.md
+++ b/examples/helloworld/README.md
@@ -17,6 +17,16 @@ $ mix run --no-halt
 $ mix run priv/client.exs
 ```
 
+## HTTP Transcoding
+
+```shell
+# Say hello
+curl -H 'accept: application/json' http://localhost:50051/v1/greeter/test
+
+# Say hello from
+curl -XPOST -H 'Content-type: application/json' -d '{"name": "test", "from": "anon"}' http://localhost:50051/v1/greeter
+```
+
 ## Regenerate Elixir code from proto
 
 1. Modify the proto `priv/protos/helloworld.proto`
@@ -26,8 +36,10 @@ $ mix run priv/client.exs
 mix escript.install hex protobuf
 ```
 4. Generate the code:
+
 ```shell
-$ protoc -I priv/protos --elixir_out=plugins=grpc:./lib/ priv/protos/helloworld.proto
+$ (cd ../../; mix build_protobuf_escript && mix escript.build)
+$ protoc -I priv/protos --elixir_out=:./lib/ --grpc_elixir_out=./lib --plugin="../../deps/protobuf/protoc-gen-elixir" --plugin="../../protoc-gen-grpc_elixir" priv/protos/helloworld.proto
 ```
 
 Refer to [protobuf-elixir](https://github.com/tony612/protobuf-elixir#usage) for more information.
diff --git a/examples/helloworld/lib/helloworld.pb.ex b/examples/helloworld/lib/helloworld.pb.ex
index c075448e..b7b5c74a 100644
--- a/examples/helloworld/lib/helloworld.pb.ex
+++ b/examples/helloworld/lib/helloworld.pb.ex
@@ -5,6 +5,14 @@ defmodule Helloworld.HelloRequest do
   field :name, 1, type: :string
 end
 
+defmodule Helloworld.HelloRequestFrom do
+  @moduledoc false
+  use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3
+
+  field :name, 1, type: :string
+  field :from, 2, type: :string
+end
+
 defmodule Helloworld.HelloReply do
   @moduledoc false
   use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3
@@ -13,14 +21,16 @@ defmodule Helloworld.HelloReply do
   field :today, 2, type: Google.Protobuf.Timestamp
 end
 
-defmodule Helloworld.Greeter.Service do
+defmodule Helloworld.GetMessageRequest do
   @moduledoc false
-  use GRPC.Service, name: "helloworld.Greeter", protoc_gen_elixir_version: "0.11.0"
+  use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3
 
-  rpc :SayHello, Helloworld.HelloRequest, Helloworld.HelloReply
+  field :name, 1, type: :string
 end
 
-defmodule Helloworld.Greeter.Stub do
+defmodule Helloworld.Message do
   @moduledoc false
-  use GRPC.Stub, service: Helloworld.Greeter.Service
+  use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3
+
+  field :text, 1, type: :string
 end
diff --git a/examples/helloworld/lib/helloworld.svc.ex b/examples/helloworld/lib/helloworld.svc.ex
new file mode 100644
index 00000000..dcf882ab
--- /dev/null
+++ b/examples/helloworld/lib/helloworld.svc.ex
@@ -0,0 +1,65 @@
+defmodule Helloworld.Greeter.Service do
+  @moduledoc false
+
+  use GRPC.Service, name: "helloworld.Greeter", protoc_gen_elixir_version: "0.11.0"
+
+  rpc(:SayHello, Helloworld.HelloRequest, Helloworld.HelloReply, %{
+    http: %{
+      type: Google.Api.PbExtension,
+      value: %Google.Api.HttpRule{
+        __unknown_fields__: [],
+        additional_bindings: [],
+        body: "",
+        pattern: {:get, "/v1/greeter/{name}"},
+        response_body: "",
+        selector: ""
+      }
+    }
+  })
+
+  rpc(:SayHelloFrom, Helloworld.HelloRequestFrom, Helloworld.HelloReply, %{
+    http: %{
+      type: Google.Api.PbExtension,
+      value: %Google.Api.HttpRule{
+        __unknown_fields__: [],
+        additional_bindings: [],
+        body: "*",
+        pattern: {:post, "/v1/greeter"},
+        response_body: "",
+        selector: ""
+      }
+    }
+  })
+end
+
+defmodule Helloworld.Greeter.Stub do
+  @moduledoc false
+
+  use 
GRPC.Stub, service: Helloworld.Greeter.Service
+end
+
+defmodule Helloworld.Messaging.Service do
+  @moduledoc false
+
+  use GRPC.Service, name: "helloworld.Messaging", protoc_gen_elixir_version: "0.11.0"
+
+  rpc(:GetMessage, Helloworld.GetMessageRequest, Helloworld.Message, %{
+    http: %{
+      type: Google.Api.PbExtension,
+      value: %Google.Api.HttpRule{
+        __unknown_fields__: [],
+        additional_bindings: [],
+        body: "",
+        pattern: {:get, "/v1/{name=messages/*}"},
+        response_body: "",
+        selector: ""
+      }
+    }
+  })
+end
+
+defmodule Helloworld.Messaging.Stub do
+  @moduledoc false
+
+  use GRPC.Stub, service: Helloworld.Messaging.Service
+end
diff --git a/examples/helloworld/lib/server.ex b/examples/helloworld/lib/server.ex
index b85241f8..68c72c10 100644
--- a/examples/helloworld/lib/server.ex
+++ b/examples/helloworld/lib/server.ex
@@ -1,16 +1,31 @@
 defmodule Helloworld.Greeter.Server do
-  use GRPC.Server, service: Helloworld.Greeter.Service
+  use GRPC.Server,
+    service: Helloworld.Greeter.Service,
+    http_transcode: true
 
   @spec say_hello(Helloworld.HelloRequest.t(), GRPC.Server.Stream.t()) ::
           Helloworld.HelloReply.t()
   def say_hello(request, _stream) do
+    Helloworld.HelloReply.new(
+      message: "Hello #{request.name}",
+      today: today()
+    )
+  end
+
+  @spec say_hello_from(Helloworld.HelloRequestFrom.t(), GRPC.Server.Stream.t()) ::
+          Helloworld.HelloReply.t()
+  def say_hello_from(request, _stream) do
+    Helloworld.HelloReply.new(
+      message: "Hello #{request.name}. From #{request.from}",
+      today: today()
+    )
+  end
+
+  defp today do
     nanos_epoch = System.system_time() |> System.convert_time_unit(:native, :nanosecond)
     seconds = div(nanos_epoch, 1_000_000_000)
     nanos = nanos_epoch - seconds * 1_000_000_000
 
-    Helloworld.HelloReply.new(
-      message: "Hello #{request.name}",
-      today: %Google.Protobuf.Timestamp{seconds: seconds, nanos: nanos}
-    )
+    %Google.Protobuf.Timestamp{seconds: seconds, nanos: nanos}
   end
 end
diff --git a/examples/helloworld/mix.exs b/examples/helloworld/mix.exs
index eacf11f4..4b803c6b 100644
--- a/examples/helloworld/mix.exs
+++ b/examples/helloworld/mix.exs
@@ -19,6 +19,7 @@ defmodule Helloworld.Mixfile do
   defp deps do
     [
       {:grpc, path: "../../"},
+      {:jason, "~> 1.3.0"},
       {:protobuf, "~> 0.11"},
       {:google_protos, "~> 0.3.0"},
       {:dialyxir, "~> 1.1", only: [:dev, :test], runtime: false}
diff --git a/examples/helloworld/mix.lock b/examples/helloworld/mix.lock
index 235128c0..df73eb18 100644
--- a/examples/helloworld/mix.lock
+++ b/examples/helloworld/mix.lock
@@ -5,6 +5,7 @@
   "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
   "google_protos": {:hex, :google_protos, "0.3.0", "15faf44dce678ac028c289668ff56548806e313e4959a3aaf4f6e1ebe8db83f4", [:mix], [{:protobuf, "~> 0.10", [hex: :protobuf, repo: "hexpm", optional: false]}], "hexpm", "1f6b7fb20371f72f418b98e5e48dae3e022a9a6de1858d4b254ac5a5d0b4035f"},
   "gun": {:hex, :grpc_gun, "2.0.1", "221b792df3a93e8fead96f697cbaf920120deacced85c6cd3329d2e67f0871f8", [:rebar3], [{:cowlib, "~> 2.11", [hex: :cowlib, repo: "hexpm", optional: false]}], "hexpm", "795a65eb9d0ba16697e6b0e1886009ce024799e43bb42753f0c59b029f592831"},
+  "jason": {:hex, :jason, "1.3.0", "fa6b82a934feb176263ad2df0dbd91bf633d4a46ebfdffea0c8ae82953714946", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "53fc1f51255390e0ec7e50f9cb41e751c260d065dcba2bf0d08dc51a4002c2ac"},
   "protobuf": {:hex, :protobuf, "0.11.0", 
"58d5531abadea3f71135e97bd214da53b21adcdb5b1420aee63f4be8173ec927", [:mix], [{:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "30ad9a867a5c5a0616cac9765c4d2c2b7b0030fa81ea6d0c14c2eb5affb6ac52"}, "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, } diff --git a/examples/helloworld/priv/protos/google/api/annotations.proto b/examples/helloworld/priv/protos/google/api/annotations.proto new file mode 100644 index 00000000..efdab3db --- /dev/null +++ b/examples/helloworld/priv/protos/google/api/annotations.proto @@ -0,0 +1,31 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/http.proto"; +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See `HttpRule`. + HttpRule http = 72295728; +} diff --git a/examples/helloworld/priv/protos/google/api/http.proto b/examples/helloworld/priv/protos/google/api/http.proto new file mode 100644 index 00000000..113fa936 --- /dev/null +++ b/examples/helloworld/priv/protos/google/api/http.proto @@ -0,0 +1,375 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +message Http { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated HttpRule rules = 1; + + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. 
+ // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + bool fully_decode_reserved_expansion = 2; +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +// how different portions of the gRPC request message are mapped to the URL +// path, URL query parameters, and HTTP request body. It also controls how the +// gRPC response message is mapped to the HTTP response body. `HttpRule` is +// typically specified as an `google.api.http` annotation on the gRPC method. +// +// Each mapping specifies a URL path template and an HTTP method. The path +// template may refer to one or more fields in the gRPC request message, as long +// as each field is a non-repeated field with a primitive (non-message) type. +// The path template controls how fields of the request message are mapped to +// the URL path. +// +// Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/{name=messages/*}" +// }; +// } +// } +// message GetMessageRequest { +// string name = 1; // Mapped to URL path. +// } +// message Message { +// string text = 1; // The resource content. +// } +// +// This enables an HTTP REST to gRPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` +// +// Any fields in the request message which are not bound by the path template +// automatically become HTTP query parameters if there is no HTTP request body. +// For example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get:"/v1/messages/{message_id}" +// }; +// } +// } +// message GetMessageRequest { +// message SubMessage { +// string subfield = 1; +// } +// string message_id = 1; // Mapped to URL path. +// int64 revision = 2; // Mapped to URL query parameter `revision`. +// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. +// } +// +// This enables a HTTP JSON to RPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | +// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: +// "foo"))` +// +// Note that fields which are mapped to URL query parameters must have a +// primitive type or a repeated primitive type or a non-repeated message type. +// In the case of a repeated type, the parameter can be repeated in the URL +// as `...?param=A¶m=B`. In the case of a message type, each field of the +// message is mapped to a separate parameter, such as +// `...?foo.a=A&foo.b=B&foo.c=C`. +// +// For HTTP methods that allow a request body, the `body` field +// specifies the mapping. 
Consider a REST update method on the +// message resource collection: +// +// service Messaging { +// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "message" +// }; +// } +// } +// message UpdateMessageRequest { +// string message_id = 1; // mapped to the URL +// Message message = 2; // mapped to the body +// } +// +// The following HTTP JSON to RPC mapping is enabled, where the +// representation of the JSON in the request body is determined by +// protos JSON encoding: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" message { text: "Hi!" })` +// +// The special name `*` can be used in the body mapping to define that +// every field not bound by the path template should be mapped to the +// request body. This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// +// The following HTTP JSON to RPC mapping is enabled: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all +// fields are passed via URL path and URL query parameters. 
+// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. +// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. +// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. 
+// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. However, some gRPC +// Transcoding implementations may not support this feature. +message HttpRule { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. + oneof pattern { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + string get = 2; + + // Maps to HTTP PUT. Used for replacing a resource. + string put = 3; + + // Maps to HTTP POST. Used for creating a resource or performing an action. + string post = 4; + + // Maps to HTTP DELETE. Used for deleting a resource. + string delete = 5; + + // Maps to HTTP PATCH. Used for updating a resource. + string patch = 6; + + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. + CustomHttpPattern custom = 8; + } + + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + string body = 7; + + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + string response_body = 12; + + // Additional HTTP bindings for the selector. Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + repeated HttpRule additional_bindings = 11; +} + +// A custom pattern is used for defining custom HTTP verb. +message CustomHttpPattern { + // The name of this custom HTTP verb. + string kind = 1; + + // The path matched by this custom verb. 
+  string path = 2;
+}
diff --git a/examples/helloworld/priv/protos/helloworld.proto b/examples/helloworld/priv/protos/helloworld.proto
index 12849981..632519a0 100644
--- a/examples/helloworld/priv/protos/helloworld.proto
+++ b/examples/helloworld/priv/protos/helloworld.proto
@@ -5,6 +5,7 @@ option java_package = "io.grpc.examples.helloworld";
 option java_outer_classname = "HelloWorldProto";
 option objc_class_prefix = "HLW";
 
+import "google/api/annotations.proto";
 import "google/protobuf/timestamp.proto";
 
 package helloworld;
@@ -12,7 +13,18 @@ package helloworld;
 // The greeting service definition.
 service Greeter {
   // Sends a greeting
-  rpc SayHello (HelloRequest) returns (HelloReply) {}
+  rpc SayHello (HelloRequest) returns (HelloReply) {
+    option (google.api.http) = {
+      get: "/v1/greeter/{name}"
+    };
+  }
+
+  rpc SayHelloFrom (HelloRequestFrom) returns (HelloReply) {
+    option (google.api.http) = {
+      post: "/v1/greeter"
+      body: "*"
+    };
+  }
 }
 
 // The request message containing the user's name.
@@ -20,8 +32,31 @@ message HelloRequest {
   string name = 1;
 }
 
+// HelloRequestFrom!
+message HelloRequestFrom {
+  // Name!
+  string name = 1;
+  // From!
+  string from = 2;
+}
+
 // The response message containing the greetings
 message HelloReply {
   string message = 1;
   google.protobuf.Timestamp today = 2;
 }
+
+service Messaging {
+  rpc GetMessage(GetMessageRequest) returns (Message) {
+    option (google.api.http) = {
+      get: "/v1/{name=messages/*}"
+    };
+  }
+}
+
+message GetMessageRequest {
+  string name = 1; // Mapped to URL path.
+}
+message Message {
+  string text = 1; // The resource content.
+}
diff --git a/examples/helloworld_transcoding/.gitignore b/examples/helloworld_transcoding/.gitignore
new file mode 100644
index 00000000..06dbcb6f
--- /dev/null
+++ b/examples/helloworld_transcoding/.gitignore
@@ -0,0 +1,23 @@
+# The directory Mix will write compiled artifacts to.
+/_build
+
+# If you run "mix test --cover", coverage assets end up here.
+/cover
+
+# The directory Mix downloads your dependencies sources to.
+/deps
+
+# Where 3rd-party dependencies like ExDoc output generated docs.
+/doc
+
+# If the VM crashes, it generates a dump, let's ignore it too.
+erl_crash.dump
+
+# Also ignore archive artifacts (built via "mix archive.build").
+*.ez
+
+/priv/grpc_c.so*
+/src/grpc_c
+/tmp
+
+/log
\ No newline at end of file
diff --git a/examples/helloworld_transcoding/README.md b/examples/helloworld_transcoding/README.md
new file mode 100644
index 00000000..66624ce0
--- /dev/null
+++ b/examples/helloworld_transcoding/README.md
@@ -0,0 +1,83 @@
+# Helloworld with HTTP/JSON transcoding in grpc-elixir
+
+## Usage
+
+1. Install deps and compile
+```shell
+$ mix do deps.get, compile
+```
+
+2. Run the server
+```shell
+$ mix run --no-halt
+```
+
+3. Run the client script
+```shell
+$ mix run priv/client.exs
+```
+
+## HTTP Transcoding
+
+```shell
+# Say hello
+$ curl -H 'accept: application/json' http://localhost:50051/v1/greeter/test
+
+# Say hello from
+$ curl -XPOST -H 'Content-type: application/json' -d '{"name": "test", "from": "anon"}' http://localhost:50051/v1/greeter
+```
+
+## Regenerate Elixir code from proto
+
+1. Modify the proto `priv/protos/helloworld.proto`
+
+2. Install `protoc` [here](https://developers.google.com/protocol-buffers/docs/downloads)
+
+3. Fetch the dependencies (this pulls in `protobuf` and the `protobuf_generate` generator):
+
+```shell
+mix deps.get
+```
+
+4. Generate the `google.api.http` extensions:
+
+```shell
+$ mix protobuf.generate --include-path=priv/protos --output-path=./lib priv/protos/google/api/annotations.proto priv/protos/google/api/http.proto
+```
+
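+For reference, this step should emit an extension module along the lines of the
+`lib/google/api/annotations.pb.ex` file added in this change (a sketch; the
+exact output depends on your `protobuf` version):
+
+```elixir
+defmodule Google.Api.PbExtension do
+  @moduledoc false
+  use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3
+
+  # Registers the `(google.api.http)` method option (field number 72295728) so
+  # that generated service definitions can carry `Google.Api.HttpRule` values.
+  extend Google.Protobuf.MethodOptions, :http, 72_295_728,
+    optional: true,
+    type: Google.Api.HttpRule
+end
+```
+
+5. 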
Generate the code: + +```shell +$ mix protobuf.generate --include-path=priv/protos --plugins=ProtobufGenerate.Plugins.GRPCWithOptions --output-path=./lib priv/protos/helloworld.proto +``` + +Refer to [protobuf-elixir](https://github.com/tony612/protobuf-elixir#usage) for more information. + +## How to start server when starting your application? + +Pass `start_server: true` as an option for the `GRPC.Server.Supervisor` in your supervision tree. + +## Benchmark + +Using [ghz](https://ghz.sh/) + +``` +$ MIX_ENV=prod iex -S mix +# Now cowboy doesn't work well with concurrency in a connection, like --concurrency 6 --connections 1 +$ ghz --insecure --proto priv/protos/helloworld.proto --call helloworld.Greeter.SayHello -d '{"name":"Joe"}' -z 10s --concurrency 6 --connections 6 127.0.0.1:50051 +# The result is for branch improve-perf +Summary: + Count: 124239 + Total: 10.00 s + Slowest: 18.85 ms + Fastest: 0.18 ms + Average: 0.44 ms + Requests/sec: 12423.71 + +# Go +Summary: + Count: 258727 + Total: 10.00 s + Slowest: 5.39 ms + Fastest: 0.09 ms + Average: 0.19 ms + Requests/sec: 25861.68 +``` diff --git a/examples/helloworld_transcoding/config/config.exs b/examples/helloworld_transcoding/config/config.exs new file mode 100644 index 00000000..9def7c2c --- /dev/null +++ b/examples/helloworld_transcoding/config/config.exs @@ -0,0 +1,3 @@ +import Config + +import_config "#{Mix.env}.exs" diff --git a/examples/helloworld_transcoding/config/dev.exs b/examples/helloworld_transcoding/config/dev.exs new file mode 100644 index 00000000..becde769 --- /dev/null +++ b/examples/helloworld_transcoding/config/dev.exs @@ -0,0 +1 @@ +import Config diff --git a/examples/helloworld_transcoding/config/prod.exs b/examples/helloworld_transcoding/config/prod.exs new file mode 100644 index 00000000..2dd33c31 --- /dev/null +++ b/examples/helloworld_transcoding/config/prod.exs @@ -0,0 +1,4 @@ +import Config + +config :logger, + level: :warn diff --git a/examples/helloworld_transcoding/config/test.exs b/examples/helloworld_transcoding/config/test.exs new file mode 100644 index 00000000..becde769 --- /dev/null +++ b/examples/helloworld_transcoding/config/test.exs @@ -0,0 +1 @@ +import Config diff --git a/examples/helloworld_transcoding/lib/endpoint.ex b/examples/helloworld_transcoding/lib/endpoint.ex new file mode 100644 index 00000000..70533a48 --- /dev/null +++ b/examples/helloworld_transcoding/lib/endpoint.ex @@ -0,0 +1,6 @@ +defmodule Helloworld.Endpoint do + use GRPC.Endpoint + + intercept GRPC.Logger.Server + run Helloworld.Greeter.Server +end diff --git a/examples/helloworld_transcoding/lib/google/api/annotations.pb.ex b/examples/helloworld_transcoding/lib/google/api/annotations.pb.ex new file mode 100644 index 00000000..48d40932 --- /dev/null +++ b/examples/helloworld_transcoding/lib/google/api/annotations.pb.ex @@ -0,0 +1,8 @@ +defmodule Google.Api.PbExtension do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + extend Google.Protobuf.MethodOptions, :http, 72_295_728, + optional: true, + type: Google.Api.HttpRule +end \ No newline at end of file diff --git a/examples/helloworld_transcoding/lib/google/api/http.pb.ex b/examples/helloworld_transcoding/lib/google/api/http.pb.ex new file mode 100644 index 00000000..524a0598 --- /dev/null +++ b/examples/helloworld_transcoding/lib/google/api/http.pb.ex @@ -0,0 +1,40 @@ +defmodule Google.Api.Http do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :rules, 1, repeated: true, type: 
Google.Api.HttpRule + + field :fully_decode_reserved_expansion, 2, + type: :bool, + json_name: "fullyDecodeReservedExpansion" +end + +defmodule Google.Api.HttpRule do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + oneof :pattern, 0 + + field :selector, 1, type: :string + field :get, 2, type: :string, oneof: 0 + field :put, 3, type: :string, oneof: 0 + field :post, 4, type: :string, oneof: 0 + field :delete, 5, type: :string, oneof: 0 + field :patch, 6, type: :string, oneof: 0 + field :custom, 8, type: Google.Api.CustomHttpPattern, oneof: 0 + field :body, 7, type: :string + field :response_body, 12, type: :string, json_name: "responseBody" + + field :additional_bindings, 11, + repeated: true, + type: Google.Api.HttpRule, + json_name: "additionalBindings" +end + +defmodule Google.Api.CustomHttpPattern do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :kind, 1, type: :string + field :path, 2, type: :string +end \ No newline at end of file diff --git a/examples/helloworld_transcoding/lib/helloworld.pb.ex b/examples/helloworld_transcoding/lib/helloworld.pb.ex new file mode 100644 index 00000000..a49adbf6 --- /dev/null +++ b/examples/helloworld_transcoding/lib/helloworld.pb.ex @@ -0,0 +1,55 @@ +defmodule Helloworld.HelloRequest do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :name, 1, type: :string +end + +defmodule Helloworld.HelloRequestFrom do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :name, 1, type: :string + field :from, 2, type: :string +end + +defmodule Helloworld.HelloReply do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :message, 1, type: :string + field :today, 2, type: Google.Protobuf.Timestamp +end + +defmodule Helloworld.Greeter.Service do + @moduledoc false + use GRPC.Service, name: "helloworld.Greeter", protoc_gen_elixir_version: "0.11.0" + + rpc(:SayHello, Helloworld.HelloRequest, Helloworld.HelloReply, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "", + pattern: {:get, "/v1/greeter/{name}"}, + response_body: "", + selector: "" + } + } + }) + + rpc(:SayHelloFrom, Helloworld.HelloRequestFrom, Helloworld.HelloReply, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "*", + pattern: {:post, "/v1/greeter"}, + response_body: "", + selector: "" + } + } + }) +end diff --git a/examples/helloworld_transcoding/lib/helloworld_app.ex b/examples/helloworld_transcoding/lib/helloworld_app.ex new file mode 100644 index 00000000..d84d62a5 --- /dev/null +++ b/examples/helloworld_transcoding/lib/helloworld_app.ex @@ -0,0 +1,12 @@ +defmodule HelloworldApp do + use Application + + def start(_type, _args) do + children = [ + {GRPC.Server.Supervisor, endpoint: Helloworld.Endpoint, port: 50051, start_server: true} + ] + + opts = [strategy: :one_for_one, name: HelloworldApp] + Supervisor.start_link(children, opts) + end +end diff --git a/examples/helloworld_transcoding/lib/server.ex b/examples/helloworld_transcoding/lib/server.ex new file mode 100644 index 00000000..68c72c10 --- /dev/null +++ b/examples/helloworld_transcoding/lib/server.ex @@ -0,0 +1,31 @@ +defmodule Helloworld.Greeter.Server do + use GRPC.Server, + service: Helloworld.Greeter.Service, + http_transcode: true 
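+  # `http_transcode: true` additionally mounts the HTTP/JSON routes declared
+  # via `option (google.api.http)` in priv/protos/helloworld.proto, alongside
+  # the regular gRPC handlers.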
+
+  @spec say_hello(Helloworld.HelloRequest.t(), GRPC.Server.Stream.t()) ::
+          Helloworld.HelloReply.t()
+  def say_hello(request, _stream) do
+    Helloworld.HelloReply.new(
+      message: "Hello #{request.name}",
+      today: today()
+    )
+  end
+
+  @spec say_hello_from(Helloworld.HelloRequestFrom.t(), GRPC.Server.Stream.t()) ::
+          Helloworld.HelloReply.t()
+  def say_hello_from(request, _stream) do
+    Helloworld.HelloReply.new(
+      message: "Hello #{request.name}. From #{request.from}",
+      today: today()
+    )
+  end
+
+  defp today do
+    nanos_epoch = System.system_time() |> System.convert_time_unit(:native, :nanosecond)
+    seconds = div(nanos_epoch, 1_000_000_000)
+    nanos = nanos_epoch - seconds * 1_000_000_000
+
+    %Google.Protobuf.Timestamp{seconds: seconds, nanos: nanos}
+  end
+end
diff --git a/examples/helloworld_transcoding/mix.exs b/examples/helloworld_transcoding/mix.exs
new file mode 100644
index 00000000..f1b13088
--- /dev/null
+++ b/examples/helloworld_transcoding/mix.exs
@@ -0,0 +1,29 @@
+defmodule Helloworld.Mixfile do
+  use Mix.Project
+
+  def project do
+    [
+      app: :helloworld,
+      version: "0.1.0",
+      elixir: "~> 1.4",
+      build_embedded: Mix.env() == :prod,
+      start_permanent: Mix.env() == :prod,
+      deps: deps()
+    ]
+  end
+
+  def application do
+    [mod: {HelloworldApp, []}, applications: [:logger, :grpc]]
+  end
+
+  defp deps do
+    [
+      {:grpc, path: "../../"},
+      {:protobuf, "~> 0.11.0"},
+      {:protobuf_generate, "~> 0.1.1", only: [:dev, :test]},
+      {:jason, "~> 1.3.0"},
+      {:google_protos, "~> 0.3.0"},
+      {:dialyxir, "~> 1.1", only: [:dev, :test], runtime: false}
+    ]
+  end
+end
diff --git a/examples/helloworld_transcoding/mix.lock b/examples/helloworld_transcoding/mix.lock
new file mode 100644
index 00000000..afdfde5e
--- /dev/null
+++ b/examples/helloworld_transcoding/mix.lock
@@ -0,0 +1,12 @@
+%{
+  "cowboy": {:hex, :cowboy, "2.9.0", "865dd8b6607e14cf03282e10e934023a1bd8be6f6bacf921a7e2a96d800cd452", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "2c729f934b4e1aa149aff882f57c6372c15399a20d54f65c8d67bef583021bde"},
+  "cowlib": {:hex, :cowlib, "2.11.0", "0b9ff9c346629256c42ebe1eeb769a83c6cb771a6ee5960bd110ab0b9b872063", [:make, :rebar3], [], "hexpm", "2b3e9da0b21c4565751a6d4901c20d1b4cc25cbb7fd50d91d2ab6dd287bc86a9"},
+  "dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"},
+  "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
+  "google_protos": {:hex, :google_protos, "0.3.0", "15faf44dce678ac028c289668ff56548806e313e4959a3aaf4f6e1ebe8db83f4", [:mix], [{:protobuf, "~> 0.10", [hex: :protobuf, repo: "hexpm", optional: false]}], "hexpm", "1f6b7fb20371f72f418b98e5e48dae3e022a9a6de1858d4b254ac5a5d0b4035f"},
+  "gun": {:hex, :grpc_gun, "2.0.1", "221b792df3a93e8fead96f697cbaf920120deacced85c6cd3329d2e67f0871f8", [:rebar3], [{:cowlib, "~> 2.11", [hex: :cowlib, repo: "hexpm", optional: false]}], "hexpm", "795a65eb9d0ba16697e6b0e1886009ce024799e43bb42753f0c59b029f592831"},
+  "jason": {:hex, :jason, "1.3.0", "fa6b82a934feb176263ad2df0dbd91bf633d4a46ebfdffea0c8ae82953714946", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], 
"hexpm", "53fc1f51255390e0ec7e50f9cb41e751c260d065dcba2bf0d08dc51a4002c2ac"}, + "protobuf": {:hex, :protobuf, "0.11.0", "58d5531abadea3f71135e97bd214da53b21adcdb5b1420aee63f4be8173ec927", [:mix], [{:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "30ad9a867a5c5a0616cac9765c4d2c2b7b0030fa81ea6d0c14c2eb5affb6ac52"}, + "protobuf_generate": {:hex, :protobuf_generate, "0.1.1", "f6098b85161dcfd48a4f6f1abee4ee5e057981dfc50aafb1aa4bd5b0529aa89b", [:mix], [{:protobuf, "~> 0.11", [hex: :protobuf, repo: "hexpm", optional: false]}], "hexpm", "93a38c8e2aba2a17e293e9ef1359122741f717103984aa6d1ebdca0efb17ab9d"}, + "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, +} diff --git a/examples/helloworld_transcoding/priv/client.exs b/examples/helloworld_transcoding/priv/client.exs new file mode 100644 index 00000000..dc6bea5d --- /dev/null +++ b/examples/helloworld_transcoding/priv/client.exs @@ -0,0 +1,9 @@ +{:ok, channel} = GRPC.Stub.connect("localhost:50051", interceptors: [GRPC.Logger.Client]) + +{:ok, reply} = + channel + |> Helloworld.Greeter.Stub.say_hello(Helloworld.HelloRequest.new(name: "grpc-elixir")) + +# pass tuple `timeout: :infinity` as a second arg to stay in IEx debugging + +IO.inspect(reply) diff --git a/examples/helloworld_transcoding/priv/protos/google/api/annotations.proto b/examples/helloworld_transcoding/priv/protos/google/api/annotations.proto new file mode 100644 index 00000000..efdab3db --- /dev/null +++ b/examples/helloworld_transcoding/priv/protos/google/api/annotations.proto @@ -0,0 +1,31 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/http.proto"; +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See `HttpRule`. + HttpRule http = 72295728; +} diff --git a/examples/helloworld_transcoding/priv/protos/google/api/http.proto b/examples/helloworld_transcoding/priv/protos/google/api/http.proto new file mode 100644 index 00000000..113fa936 --- /dev/null +++ b/examples/helloworld_transcoding/priv/protos/google/api/http.proto @@ -0,0 +1,375 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +message Http { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated HttpRule rules = 1; + + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. + // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + bool fully_decode_reserved_expansion = 2; +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +// how different portions of the gRPC request message are mapped to the URL +// path, URL query parameters, and HTTP request body. It also controls how the +// gRPC response message is mapped to the HTTP response body. `HttpRule` is +// typically specified as an `google.api.http` annotation on the gRPC method. +// +// Each mapping specifies a URL path template and an HTTP method. The path +// template may refer to one or more fields in the gRPC request message, as long +// as each field is a non-repeated field with a primitive (non-message) type. +// The path template controls how fields of the request message are mapped to +// the URL path. +// +// Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/{name=messages/*}" +// }; +// } +// } +// message GetMessageRequest { +// string name = 1; // Mapped to URL path. +// } +// message Message { +// string text = 1; // The resource content. +// } +// +// This enables an HTTP REST to gRPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` +// +// Any fields in the request message which are not bound by the path template +// automatically become HTTP query parameters if there is no HTTP request body. 
+// For example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get:"/v1/messages/{message_id}" +// }; +// } +// } +// message GetMessageRequest { +// message SubMessage { +// string subfield = 1; +// } +// string message_id = 1; // Mapped to URL path. +// int64 revision = 2; // Mapped to URL query parameter `revision`. +// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. +// } +// +// This enables a HTTP JSON to RPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | +// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: +// "foo"))` +// +// Note that fields which are mapped to URL query parameters must have a +// primitive type or a repeated primitive type or a non-repeated message type. +// In the case of a repeated type, the parameter can be repeated in the URL +// as `...?param=A¶m=B`. In the case of a message type, each field of the +// message is mapped to a separate parameter, such as +// `...?foo.a=A&foo.b=B&foo.c=C`. +// +// For HTTP methods that allow a request body, the `body` field +// specifies the mapping. Consider a REST update method on the +// message resource collection: +// +// service Messaging { +// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "message" +// }; +// } +// } +// message UpdateMessageRequest { +// string message_id = 1; // mapped to the URL +// Message message = 2; // mapped to the body +// } +// +// The following HTTP JSON to RPC mapping is enabled, where the +// representation of the JSON in the request body is determined by +// protos JSON encoding: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" message { text: "Hi!" })` +// +// The special name `*` can be used in the body mapping to define that +// every field not bound by the path template should be mapped to the +// request body. This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// +// The following HTTP JSON to RPC mapping is enabled: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. 
Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all +// fields are passed via URL path and URL query parameters. +// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. +// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. +// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. 
The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. +// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. However, some gRPC +// Transcoding implementations may not support this feature. +message HttpRule { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. + oneof pattern { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + string get = 2; + + // Maps to HTTP PUT. Used for replacing a resource. + string put = 3; + + // Maps to HTTP POST. Used for creating a resource or performing an action. + string post = 4; + + // Maps to HTTP DELETE. Used for deleting a resource. + string delete = 5; + + // Maps to HTTP PATCH. Used for updating a resource. + string patch = 6; + + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. 
+ CustomHttpPattern custom = 8; + } + + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + string body = 7; + + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + string response_body = 12; + + // Additional HTTP bindings for the selector. Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + repeated HttpRule additional_bindings = 11; +} + +// A custom pattern is used for defining custom HTTP verb. +message CustomHttpPattern { + // The name of this custom HTTP verb. + string kind = 1; + + // The path matched by this custom verb. + string path = 2; +} diff --git a/examples/helloworld_transcoding/priv/protos/helloworld.proto b/examples/helloworld_transcoding/priv/protos/helloworld.proto new file mode 100644 index 00000000..55c41005 --- /dev/null +++ b/examples/helloworld_transcoding/priv/protos/helloworld.proto @@ -0,0 +1,47 @@ +syntax = "proto3"; + +option java_multiple_files = true; +option java_package = "io.grpc.examples.helloworld"; +option java_outer_classname = "HelloWorldProto"; +option objc_class_prefix = "HLW"; + +import "google/api/annotations.proto"; +import "google/protobuf/timestamp.proto"; + +package helloworld; + +// The greeting service definition. +service Greeter { + // Sends a greeting + rpc SayHello (HelloRequest) returns (HelloReply) { + option (google.api.http) = { + get: "/v1/greeter/{name}" + }; + } + + rpc SayHelloFrom (HelloRequestFrom) returns (HelloReply) { + option (google.api.http) = { + post: "/v1/greeter" + body: "*" + }; + } +} + +// The request message containing the user's name. +message HelloRequest { + string name = 1; +} + +// HelloRequestFrom! +message HelloRequestFrom { + // Name! + string name = 1; + // From! 
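+  // Bound from the HTTP request body, since SayHelloFrom declares body: "*".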
+  string from = 2;
+}
+
+// The response message containing the greetings
+message HelloReply {
+  string message = 1;
+  google.protobuf.Timestamp today = 2;
+}
diff --git a/examples/helloworld_transcoding/test/hello_world_test.exs b/examples/helloworld_transcoding/test/hello_world_test.exs
new file mode 100644
index 00000000..962d07ac
--- /dev/null
+++ b/examples/helloworld_transcoding/test/hello_world_test.exs
@@ -0,0 +1,16 @@
+defmodule HelloworldTest do
+  @moduledoc false
+
+  use ExUnit.Case
+
+  setup_all do
+    {:ok, channel} = GRPC.Stub.connect("localhost:50051", interceptors: [GRPC.Logger.Client])
+    [channel: channel]
+  end
+
+  test "helloworld should be successful", %{channel: channel} do
+    req = Helloworld.HelloRequest.new(name: "grpc-elixir")
+    assert {:ok, %{message: msg, today: _}} = Helloworld.Greeter.Stub.say_hello(channel, req)
+    assert msg == "Hello grpc-elixir"
+  end
+end
diff --git a/examples/helloworld_transcoding/test/test_helper.exs b/examples/helloworld_transcoding/test/test_helper.exs
new file mode 100644
index 00000000..869559e7
--- /dev/null
+++ b/examples/helloworld_transcoding/test/test_helper.exs
@@ -0,0 +1 @@
+ExUnit.start()
diff --git a/interop/mix.exs b/interop/mix.exs
index 0b432a91..05f61478 100644
--- a/interop/mix.exs
+++ b/interop/mix.exs
@@ -23,7 +23,10 @@ defmodule Interop.MixProject do
   defp deps do
     [
      {:grpc, path: "..", override: true},
-      {:protobuf, "~> 0.11"}
+      {:protobuf, "~> 0.11.0"},
+      {:grpc_statsd, "~> 0.1.0"},
+      {:statix, ">= 1.2.1"},
+      {:extrace, "~> 0.2"}
    ]
  end
end
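The gRPC-level test above exercises the example over HTTP/2; the transcoded HTTP/JSON surface can also be tested without a gRPC client. A possible companion test, as a sketch only: it assumes the example server from this app is running locally on port 50051 and that `Jason` is available (as it is for the JSON codec):

```elixir
defmodule HelloworldTranscodingHTTPTest do
  use ExUnit.Case

  test "say_hello is also reachable via HTTP/JSON" do
    # :inets ships with OTP and provides the :httpc client
    {:ok, _} = Application.ensure_all_started(:inets)

    # GET /v1/greeter/{name} is declared by the (google.api.http) option
    url = String.to_charlist("http://localhost:50051/v1/greeter/grpc-elixir")

    {:ok, {{_version, 200, _reason}, _headers, body}} = :httpc.request(:get, {url, []}, [], [])

    assert %{"message" => "Hello grpc-elixir"} = Jason.decode!(List.to_string(body))
  end
end
```

diff --git a/interop/mix.lock b/interop/mix.lock
index e24af57b..0533eb90 100644
--- a/interop/mix.lock
+++ b/interop/mix.lock
@@ -1,11 +1,15 @@
 %{
  "cowboy": {:hex, :cowboy, "2.10.0", "ff9ffeff91dae4ae270dd975642997afe2a1179d94b1887863e43f681a203e26", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "3afdccb7183cc6f143cb14d3cf51fa00e53db9ec80cdcd525482f5e99bc41d6b"},
  "cowlib": {:hex, :cowlib, "2.12.1", "a9fa9a625f1d2025fe6b462cb865881329b5caff8f1854d1cbc9f9533f00e1e1", [:make, :rebar3], [], "hexpm", "163b73f6367a7341b33c794c4e88e7dbfe6498ac42dcd69ef44c5bc5507c8db0"},
+ "extrace": {:hex, :extrace, "0.5.0", "4ee5419fbc3820c4592daebe0f8527001aa623578d9a725d8ae521315fce0277", [:mix], [{:recon, "~> 2.5", [hex: :recon, repo: "hexpm", optional: false]}], "hexpm", "2a3ab7fa0701949efee1034293fa0b0e65926ffe256ccd6d0e10dd8a9406cd02"},
  "grpc": {:git, "https://github.com/elixir-grpc/grpc.git", "21422839798e49bf6d29327fab0a7add51becedd", []},
+ "grpc_statsd": {:hex, :grpc_statsd, "0.1.0", "a95ae388188486043f92a3c5091c143f5a646d6af80c9da5ee616546c4d8f5ff", [:mix], [{:grpc, ">= 0.0.0", [hex: :grpc, repo: "hexpm", optional: true]}, {:statix, ">= 0.0.0", [hex: :statix, repo: "hexpm", optional: true]}], "hexpm", "de0c05db313c7b3ffeff345855d173fd82fec3de16591a126b673f7f698d9e74"},
  "gun": {:hex, :gun, "2.0.1", "160a9a5394800fcba41bc7e6d421295cf9a7894c2252c0678244948e3336ad73", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}], "hexpm", "a10bc8d6096b9502205022334f719cc9a08d9adcfbfc0dbee9ef31b56274a20b"},
  "hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"},
  "mint": {:hex, :mint, "1.5.1", "8db5239e56738552d85af398798c80648db0e90f343c8469f6c6d8898944fb6f", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: 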
"hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "4a63e1e76a7c3956abd2c72f370a0d0aecddc3976dea5c27eccbecfa5e7d5b1e"}, "protobuf": {:hex, :protobuf, "0.11.0", "58d5531abadea3f71135e97bd214da53b21adcdb5b1420aee63f4be8173ec927", [:mix], [{:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "30ad9a867a5c5a0616cac9765c4d2c2b7b0030fa81ea6d0c14c2eb5affb6ac52"}, "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, + "recon": {:hex, :recon, "2.5.5", "c108a4c406fa301a529151a3bb53158cadc4064ec0c5f99b03ddb8c0e4281bdf", [:mix, :rebar3], [], "hexpm", "632a6f447df7ccc1a4a10bdcfce71514412b16660fe59deca0fcf0aa3c054404"}, + "statix": {:hex, :statix, "1.4.0", "c822abd1e60e62828e8460e932515d0717aa3c089b44cc3f795d43b94570b3a8", [:mix], [], "hexpm", "507373cc80925a9b6856cb14ba17f6125552434314f6613c907d295a09d1a375"}, "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, } diff --git a/lib/google/api/annotations.pb.ex b/lib/google/api/annotations.pb.ex new file mode 100644 index 00000000..374877d3 --- /dev/null +++ b/lib/google/api/annotations.pb.ex @@ -0,0 +1,8 @@ +defmodule Google.Api.PbExtension do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + extend Google.Protobuf.MethodOptions, :http, 72_295_728, + optional: true, + type: Google.Api.HttpRule +end diff --git a/lib/google/api/http.pb.ex b/lib/google/api/http.pb.ex new file mode 100644 index 00000000..25dd83ad --- /dev/null +++ b/lib/google/api/http.pb.ex @@ -0,0 +1,43 @@ +defmodule Google.Api.Http do + @moduledoc false + + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :rules, 1, repeated: true, type: Google.Api.HttpRule + + field :fully_decode_reserved_expansion, 2, + type: :bool, + json_name: "fullyDecodeReservedExpansion" +end + +defmodule Google.Api.HttpRule do + @moduledoc false + + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + oneof :pattern, 0 + + field :selector, 1, type: :string + field :get, 2, type: :string, oneof: 0 + field :put, 3, type: :string, oneof: 0 + field :post, 4, type: :string, oneof: 0 + field :delete, 5, type: :string, oneof: 0 + field :patch, 6, type: :string, oneof: 0 + field :custom, 8, type: Google.Api.CustomHttpPattern, oneof: 0 + field :body, 7, type: :string + field :response_body, 12, type: :string, json_name: "responseBody" + + field :additional_bindings, 11, + repeated: true, + type: Google.Api.HttpRule, + json_name: "additionalBindings" +end + +defmodule Google.Api.CustomHttpPattern do + @moduledoc false + + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :kind, 1, type: :string + field :path, 2, type: :string +end diff --git a/lib/grpc/codec/json.ex b/lib/grpc/codec/json.ex new file mode 100644 index 00000000..d84a75c1 --- /dev/null +++ b/lib/grpc/codec/json.ex @@ -0,0 +1,67 @@ +defmodule GRPC.Codec.JSON do + @moduledoc """ + JSON Codec for gRPC communication. + + This module implements the `GRPC.Codec` behaviour, providing encoding and decoding functions + for JSON serialization in the context of gRPC communication. + + ## Behavior Functions + + - `name/0`: Returns the name of the codec, which is "json". 
+  - `encode/1`: Encodes a struct using the `Protobuf.JSON.encode!/1` function.
+  - `decode/2`: Decodes binary data into a map using the Jason library.
+
+  This module requires the Jason dependency.
+  """
+  @behaviour GRPC.Codec
+
+  def name(), do: "json"
+
+  @doc """
+  Encodes a struct using the `Protobuf.JSON.encode!/1` function.
+
+  ### Parameters:
+
+  - `struct` - The struct to be encoded.
+
+  ### Returns:
+
+  The encoded binary data.
+
+  ### Example:
+
+  ```elixir
+  %MyStruct{id: 1, name: "John"} |> GRPC.Codec.JSON.encode()
+  ```
+
+  """
+
+  def encode(struct) do
+    Protobuf.JSON.encode!(struct)
+  end
+
+  @doc """
+  Decodes binary data into a map using the Jason library.
+
+  ### Parameters:
+
+  - `binary` - The binary data to be decoded.
+  - `module` - The request message module. It is ignored by this codec, since
+    JSON is decoded to a plain map.
+
+  ### Returns:
+
+  A map representing the decoded data.
+
+  ### Raises:
+
+  Raises an error if the Jason library is not loaded.
+
+  ### Example:
+
+  ```elixir
+  binary_data |> GRPC.Codec.JSON.decode(__MODULE__)
+  ```
+  """
+  def decode(<<>>, _module), do: %{}
+
+  def decode(binary, _module), do: Jason.decode!(binary)
+end
diff --git a/lib/grpc/protoc/cli.ex b/lib/grpc/protoc/cli.ex
new file mode 100644
index 00000000..c60afae5
--- /dev/null
+++ b/lib/grpc/protoc/cli.ex
@@ -0,0 +1,226 @@
+defmodule GRPC.Protoc.CLI do
+  @moduledoc """
+  `protoc` plugin for generating Elixir gRPC service code.
+
+  `protoc-gen-grpc_elixir` (this name is important) **must** be in `$PATH`. You are not supposed
+  to call it directly, but only through `protoc`.
+
+  ## Examples
+
+      $ protoc --grpc_elixir_out=./lib your.proto
+      $ protoc -I protos --grpc_elixir_out=./lib protos/namespace/*.proto
+
+  Options:
+
+    * --version       Print the version of this plugin
+    * --help (-h)     Print this help
+
+  """
+
+  alias Protobuf.Protoc.Context
+
+  # Entrypoint for the escript (protoc-gen-grpc_elixir).
+  @doc false
+  @spec main([String.t()]) :: :ok
+  def main(args)
+
+  def main(["--version"]) do
+    {:ok, version} = :application.get_key(:grpc, :vsn)
+    IO.puts(version)
+  end
+
+  def main([opt]) when opt in ["--help", "-h"] do
+    IO.puts(@moduledoc)
+  end
+
+  # When called through protoc, all input is passed through stdin.
+  def main([] = _args) do
+    Protobuf.load_extensions()
+
+    # See https://groups.google.com/forum/#!topic/elixir-lang-talk/T5enez_BBTI.
+    :io.setopts(:standard_io, encoding: :latin1)
+
+    # Read the standard input that protoc feeds us.
+    bin = binread_all!(:stdio)
+
+    request = Protobuf.Decoder.decode(bin, Google.Protobuf.Compiler.CodeGeneratorRequest)
+
+    ctx =
+      %Context{}
+      |> parse_params(request.parameter || "")
+      |> find_types(request.proto_file, request.file_to_generate)
+
+    files =
+      Enum.flat_map(request.file_to_generate, fn file ->
+        desc = Enum.find(request.proto_file, &(&1.name == file))
+        GRPC.Protoc.Generator.generate(ctx, desc)
+      end)
+
+    %Google.Protobuf.Compiler.CodeGeneratorResponse{
+      file: files,
+      supported_features: supported_features()
+    }
+    |> Protobuf.encode_to_iodata()
+    |> IO.binwrite()
+  end
+
+  def main(_args) do
+    raise "invalid arguments. See protoc-gen-grpc_elixir --help."
+  end
+
+  def supported_features() do
+    # The only available feature is proto3 with optional fields.
+    # This is backwards compatible with proto2 optional fields.
+    Google.Protobuf.Compiler.CodeGeneratorResponse.Feature.value(:FEATURE_PROTO3_OPTIONAL)
+  end
+
+  # Made public for testing. 
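+  # For illustration, a hypothetical call (a sketch; only the `plugins` and
+  # `gen_descriptors?` context fields used by the clauses below are shown):
+  #
+  #     parse_params(%Context{}, "plugins=grpc,gen_descriptors=true")
+  #     #=> %Context{plugins: ["grpc"], gen_descriptors?: true}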
+ @doc false + def parse_params(%Context{} = ctx, params_str) when is_binary(params_str) do + params_str + |> String.split(",") + |> Enum.reduce(ctx, &parse_param/2) + end + + defp parse_param("plugins=" <> plugins, ctx) do + %Context{ctx | plugins: String.split(plugins, "+")} + end + + defp parse_param("gen_descriptors=" <> value, ctx) do + case value do + "true" -> + %Context{ctx | gen_descriptors?: true} + + other -> + raise "invalid value for gen_descriptors option, expected \"true\", got: #{inspect(other)}" + end + end + + defp parse_param("package_prefix=" <> package, ctx) do + if package == "" do + raise "package_prefix can't be empty" + else + %Context{ctx | package_prefix: package} + end + end + + defp parse_param("transform_module=" <> module, ctx) do + %Context{ctx | transform_module: Module.concat([module])} + end + + defp parse_param("one_file_per_module=" <> value, ctx) do + case value do + "true" -> + %Context{ctx | one_file_per_module?: true} + + other -> + raise "invalid value for one_file_per_module option, expected \"true\", got: #{inspect(other)}" + end + end + + # defp parse_param("include_docs=" <> value, ctx) do + # case value do + # "true" -> + # %Context{ctx | include_docs?: true} + + # other -> + # raise "invalid value for include_docs option, expected \"true\", got: #{inspect(other)}" + # end + # end + + defp parse_param(_unknown, ctx) do + ctx + end + + # Made public for testing. + @doc false + @spec find_types(Context.t(), [Google.Protobuf.FileDescriptorProto.t()], [String.t()]) :: + Context.t() + def find_types(%Context{} = ctx, descs, files_to_generate) + when is_list(descs) and is_list(files_to_generate) do + global_type_mapping = + Map.new(descs, fn %Google.Protobuf.FileDescriptorProto{name: filename} = desc -> + {filename, find_types_in_proto(ctx, desc, files_to_generate)} + end) + + %Context{ctx | global_type_mapping: global_type_mapping} + end + + defp find_types_in_proto( + %Context{} = ctx, + %Google.Protobuf.FileDescriptorProto{} = desc, + files_to_generate + ) do + # Only take package_prefix into consideration for files that we're directly generating. + package_prefix = + if desc.name in files_to_generate do + ctx.package_prefix + else + nil + end + + ctx = + %Protobuf.Protoc.Context{ + namespace: [], + package_prefix: package_prefix, + package: desc.package + } + |> Protobuf.Protoc.Context.custom_file_options_from_file_desc(desc) + + find_types_in_descriptor(_types = %{}, ctx, desc.message_type ++ desc.enum_type) + end + + defp find_types_in_descriptor(types_acc, ctx, descs) when is_list(descs) do + Enum.reduce(descs, types_acc, &find_types_in_descriptor(_acc = &2, ctx, _desc = &1)) + end + + defp find_types_in_descriptor( + types_acc, + ctx, + %Google.Protobuf.DescriptorProto{name: name} = desc + ) do + new_ctx = update_in(ctx.namespace, &(&1 ++ [name])) + + types_acc + |> update_types(ctx, name) + |> find_types_in_descriptor(new_ctx, desc.enum_type) + |> find_types_in_descriptor(new_ctx, desc.nested_type) + end + + defp find_types_in_descriptor( + types_acc, + ctx, + %Google.Protobuf.EnumDescriptorProto{name: name} + ) do + update_types(types_acc, ctx, name) + end + + defp update_types(types, %Context{namespace: ns, package: pkg} = ctx, name) do + type_name = Protobuf.Protoc.Generator.Util.mod_name(ctx, ns ++ [name]) + + mapping_name = + ([pkg] ++ ns ++ [name]) + |> Enum.reject(&is_nil/1) + |> Enum.join(".") + + Map.put(types, "." 
<> mapping_name, %{type_name: type_name}) + end + + if Version.match?(System.version(), "~> 1.13") do + defp binread_all!(device) do + case IO.binread(device, :eof) do + data when is_binary(data) -> data + :eof -> _previous_behavior = "" + other -> raise "reading from #{inspect(device)} failed: #{inspect(other)}" + end + end + else + defp binread_all!(device) do + case IO.binread(device, :all) do + data when is_binary(data) -> data + other -> raise "reading from #{inspect(device)} failed: #{inspect(other)}" + end + end + end +end diff --git a/lib/grpc/protoc/generator.ex b/lib/grpc/protoc/generator.ex new file mode 100644 index 00000000..f7586885 --- /dev/null +++ b/lib/grpc/protoc/generator.ex @@ -0,0 +1,68 @@ +defmodule GRPC.Protoc.Generator do + @moduledoc false + + alias Protobuf.Protoc.Context + alias Protobuf.Protoc.Generator + + @spec generate(Context.t(), %Google.Protobuf.FileDescriptorProto{}) :: + [Google.Protobuf.Compiler.CodeGeneratorResponse.File.t()] + def generate(%Context{} = ctx, %Google.Protobuf.FileDescriptorProto{} = desc) do + module_definitions = + ctx + |> generate_module_definitions(desc) + |> Enum.reject(&is_nil/1) + + if ctx.one_file_per_module? do + Enum.map(module_definitions, fn {mod_name, content} -> + file_name = Macro.underscore(mod_name) <> ".svc.ex" + + %Google.Protobuf.Compiler.CodeGeneratorResponse.File{ + name: file_name, + content: content + } + end) + else + # desc.name is the filename, ending in ".proto". + file_name = Path.rootname(desc.name) <> ".svc.ex" + + content = + module_definitions + |> Enum.map(fn {_mod_name, contents} -> [contents, ?\n] end) + |> IO.iodata_to_binary() + |> Generator.Util.format() + + [ + %Google.Protobuf.Compiler.CodeGeneratorResponse.File{ + name: file_name, + content: content + } + ] + end + end + + defp generate_module_definitions(ctx, %Google.Protobuf.FileDescriptorProto{} = desc) do + ctx = + %Context{ + ctx + | syntax: syntax(desc.syntax), + package: desc.package, + dep_type_mapping: get_dep_type_mapping(ctx, desc.dependency, desc.name) + } + |> Protobuf.Protoc.Context.custom_file_options_from_file_desc(desc) + + Enum.map(desc.service, &GRPC.Protoc.Generator.Service.generate(ctx, &1)) + end + + defp get_dep_type_mapping(%Context{global_type_mapping: global_mapping}, deps, file_name) do + mapping = + Enum.reduce(deps, %{}, fn dep, acc -> + Map.merge(acc, global_mapping[dep]) + end) + + Map.merge(mapping, global_mapping[file_name]) + end + + defp syntax("proto3"), do: :proto3 + defp syntax("proto2"), do: :proto2 + defp syntax(nil), do: :proto2 +end diff --git a/lib/grpc/protoc/generator/service.ex b/lib/grpc/protoc/generator/service.ex new file mode 100644 index 00000000..f7aeea10 --- /dev/null +++ b/lib/grpc/protoc/generator/service.ex @@ -0,0 +1,71 @@ +defmodule GRPC.Protoc.Generator.Service do + @moduledoc false + + alias Protobuf.Protoc.Context + alias Protobuf.Protoc.Generator.Util + + require EEx + + EEx.function_from_file( + :defp, + :service_template, + Path.expand("./templates/service.ex.eex", :code.priv_dir(:grpc)), + [:assigns] + ) + + @spec generate(Context.t(), Google.Protobuf.ServiceDescriptorProto.t()) :: + {String.t(), String.t()} + def generate(%Context{} = ctx, %Google.Protobuf.ServiceDescriptorProto{} = desc) do + # service can't be nested + mod_name = Util.mod_name(ctx, [Macro.camelize(desc.name)]) + name = Util.prepend_package_prefix(ctx.package, desc.name) + methods = Enum.map(desc.method, &generate_service_method(ctx, &1)) + + descriptor_fun_body = + if ctx.gen_descriptors? 
do
+        Util.descriptor_fun_body(desc)
+      else
+        nil
+      end
+
+    {mod_name,
+     Util.format(
+       service_template(
+         module: mod_name,
+         service_name: name,
+         methods: methods,
+         descriptor_fun_body: descriptor_fun_body,
+         version: Util.version(),
+         module_doc?: ctx.include_docs?
+       )
+     )}
+  end
+
+  defp generate_service_method(ctx, method) do
+    input = service_arg(Util.type_from_type_name(ctx, method.input_type), method.client_streaming)
+
+    output =
+      service_arg(Util.type_from_type_name(ctx, method.output_type), method.server_streaming)
+
+    options =
+      method.options
+      |> opts()
+      |> inspect(limit: :infinity)
+
+    {method.name, input, output, options}
+  end
+
+  defp service_arg(type, _streaming? = true), do: "stream(#{type})"
+  defp service_arg(type, _streaming?), do: type
+
+  defp opts(%Google.Protobuf.MethodOptions{__pb_extensions__: extensions})
+       when extensions == %{} do
+    %{}
+  end
+
+  defp opts(%Google.Protobuf.MethodOptions{__pb_extensions__: extensions}) do
+    for {{type, field}, value} <- extensions, into: %{} do
+      {field, %{type: type, value: value}}
+    end
+  end
+end
diff --git a/lib/grpc/server.ex b/lib/grpc/server.ex
index f1bacb68..cdd51bc3 100644
--- a/lib/grpc/server.ex
+++ b/lib/grpc/server.ex
@@ -29,11 +29,92 @@ defmodule GRPC.Server do
   The request will be a `Enumerable.t`(created by Elixir's `Stream`) of requests
   if it's streaming. If a reply is streaming, you need to call `send_reply/2` to send
   replies one by one instead of returning reply in the end.
+
+  ## gRPC HTTP/JSON transcoding
+
+  Transcoding can be enabled by using the option `http_transcode: true`:
+
+      defmodule Greeter.Service do
+        use GRPC.Service, name: "helloworld.Greeter"
+
+        rpc :SayHello, Request, Reply
+        rpc :SayGoodbye, stream(Request), stream(Reply)
+      end
+
+      defmodule Greeter.Server do
+        use GRPC.Server, service: Greeter.Service, http_transcode: true
+
+        def say_hello(request, _stream) do
+          Reply.new(message: "Hello " <> request.name)
+        end
+
+        def say_goodbye(request_enum, stream) do
+          # Reply to each request in the incoming stream one by one
+          Enum.each(request_enum, fn request ->
+            GRPC.Server.send_reply(stream, Reply.new(message: "Goodbye " <> request.name))
+          end)
+        end
+      end
+
+  With transcoding enabled, gRPC methods can also be called over HTTP/1 with
+  JSON payloads, e.g.:
+
+      POST localhost/helloworld.Greeter/SayHello
+      Content-Type: application/json
+      {
+        "name": "gRPC"
+      }
+
+      HTTP/1.1 200 OK
+      Content-Type: application/json
+      {
+        "message": "Hello gRPC"
+      }
+
+  By using `option (google.api.http)` annotations in the `.proto` file the mapping between
+  HTTP/JSON and gRPC methods and parameters can be customized:
+
+      syntax = "proto3";
+
+      import "google/api/annotations.proto";
+      import "google/protobuf/timestamp.proto";
+
+      package helloworld;
+
+      service Greeter {
+        rpc SayHello (HelloRequest) returns (HelloReply) {
+          option (google.api.http) = {
+            get: "/v1/greeter/{name}"
+          };
+        }
+      }
+
+      message HelloRequest {
+        string name = 1;
+      }
+
+      message HelloReply {
+        string message = 1;
+      }
+
+  In addition to the `POST localhost/helloworld.Greeter/SayHello` route in the previous
+  examples, this creates an additional route: `GET localhost/v1/greeter/:name`
+
+      GET localhost/v1/greeter/gRPC
+      Accept: application/json
+
+      HTTP/1.1 200 OK
+      Content-Type: application/json
+      {
+        "message": "Hello gRPC"
+      }
+
+  For more comprehensive documentation on annotation usage in `.proto` files, see
+  [the official transcoding documentation](https://cloud.google.com/endpoints/docs/grpc/transcoding).
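+
+  To actually serve the transcoded routes, the server is started like any other
+  gRPC server. A minimal sketch (the `Helloworld.Endpoint` module name and the
+  port are illustrative assumptions, not part of this module):
+
+      defmodule Helloworld.Endpoint do
+        use GRPC.Endpoint
+
+        run Greeter.Server
+      end
+
+      # in your application's supervision tree:
+      children = [
+        {GRPC.Server.Supervisor, endpoint: Helloworld.Endpoint, port: 50051, start_server: true}
+      ]
   """

   require Logger

   alias GRPC.RPCError
+  alias GRPC.Server.Router
+  alias GRPC.Server.Transcode

   @type rpc_req :: struct | Enumerable.t()
   @type rpc_return ::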
struct | any @@ -43,15 +124,35 @@ defmodule GRPC.Server do quote bind_quoted: [opts: opts], location: :keep do service_mod = opts[:service] service_name = service_mod.__meta__(:name) - codecs = opts[:codecs] || [GRPC.Codec.Proto, GRPC.Codec.WebText] + codecs = opts[:codecs] || [GRPC.Codec.Proto, GRPC.Codec.WebText, GRPC.Codec.JSON] compressors = opts[:compressors] || [] + http_transcode = opts[:http_transcode] || false + + codecs = if http_transcode, do: [GRPC.Codec.JSON | codecs], else: codecs + + routes = + for {name, _, _, options} = rpc <- service_mod.__rpc_calls__, reduce: [] do + acc -> + path = "/#{service_name}/#{name}" - Enum.each(service_mod.__rpc_calls__, fn {name, _, _} = rpc -> + acc = + if http_transcode and Map.has_key?(options, :http) do + %{value: http_rule} = GRPC.Service.rpc_options(rpc, :http) + route = Macro.escape({:http_transcode, Router.build_route(http_rule)}) + [route | acc] + else + acc + end + + [{:grpc, path} | acc] + end + + Enum.each(service_mod.__rpc_calls__, fn {name, _, _, options} = rpc -> func_name = name |> to_string |> Macro.underscore() |> String.to_atom() path = "/#{service_name}/#{name}" grpc_type = GRPC.Service.grpc_type(rpc) - def __call_rpc__(unquote(path), stream) do + def __call_rpc__(unquote(path), :post, stream) do GRPC.Server.call( unquote(service_mod), %{ @@ -64,15 +165,41 @@ defmodule GRPC.Server do unquote(func_name) ) end + + if http_transcode and Map.has_key?(options, :http) do + %{value: http_rule} = GRPC.Service.rpc_options(rpc, :http) + {http_method, http_path, _matches} = Router.build_route(http_rule) + + def __call_rpc__(unquote(http_path), unquote(http_method), stream) do + GRPC.Server.call( + unquote(service_mod), + %{ + stream + | service_name: unquote(service_name), + method_name: unquote(to_string(name)), + grpc_type: unquote(grpc_type), + http_method: unquote(http_method), + http_transcode: unquote(http_transcode) + }, + unquote(Macro.escape(put_elem(rpc, 0, func_name))), + unquote(func_name) + ) + end + end end) def __call_rpc__(_, stream) do raise GRPC.RPCError, status: :unimplemented end + def service_name(_) do + "" + end + def __meta__(:service), do: unquote(service_mod) def __meta__(:codecs), do: unquote(codecs) def __meta__(:compressors), do: unquote(compressors) + def __meta__(:routes), do: unquote(routes) end end @@ -82,7 +209,7 @@ defmodule GRPC.Server do def call( _service_mod, stream, - {_, {req_mod, req_stream}, {res_mod, res_stream}} = rpc, + {_, {req_mod, req_stream}, {res_mod, res_stream}, _options} = rpc, func_name ) do request_id = generate_request_id() @@ -116,6 +243,34 @@ defmodule GRPC.Server do end end + defp do_handle_request( + false, + res_stream, + %{ + rpc: rpc, + request_mod: req_mod, + codec: codec, + adapter: adapter, + payload: payload, + http_transcode: true + } = stream, + func_name + ) do + {:ok, data} = adapter.read_body(payload) + request_body = codec.decode(data, req_mod) + rule = GRPC.Service.rpc_options(rpc, :http) || %{value: %{}} + bindings = adapter.get_bindings(payload) + qs = adapter.get_qs(payload) + + case Transcode.map_request(rule.value, request_body, bindings, qs, req_mod) do + {:ok, request} -> + call_with_interceptors(res_stream, func_name, stream, request) + + resp = {:error, _} -> + resp + end + end + defp do_handle_request( false, res_stream, @@ -292,7 +447,11 @@ defmodule GRPC.Server do iex> GRPC.Server.send_reply(stream, reply) """ @spec send_reply(GRPC.Server.Stream.t(), struct()) :: GRPC.Server.Stream.t() - def send_reply(%{__interface__: interface} = stream, reply, 
opts \\ []) do + def send_reply( + %{__interface__: interface} = stream, + reply, + opts \\ [] + ) do interface[:send_reply].(stream, reply, opts) end @@ -343,13 +502,6 @@ defmodule GRPC.Server do stream end - @doc false - @spec service_name(String.t()) :: String.t() - def service_name(path) do - ["", name | _] = String.split(path, "/") - name - end - @doc false @spec servers_to_map(module() | [module()]) :: %{String.t() => [module()]} def servers_to_map(servers) do diff --git a/lib/grpc/server/adapters/cowboy.ex b/lib/grpc/server/adapters/cowboy.ex index cb78eea2..216aa924 100644 --- a/lib/grpc/server/adapters/cowboy.ex +++ b/lib/grpc/server/adapters/cowboy.ex @@ -150,7 +150,8 @@ defmodule GRPC.Server.Adapters.Cowboy do @impl true def send_reply(%{pid: pid}, data, opts) do - Handler.stream_body(pid, data, opts, :nofin) + http_transcode = Keyword.get(opts, :http_transcode) + Handler.stream_body(pid, data, opts, :nofin, http_transcode) end @impl true @@ -182,27 +183,50 @@ defmodule GRPC.Server.Adapters.Cowboy do Handler.get_cert(pid) end + def get_qs(%{pid: pid}) do + Handler.get_qs(pid) + end + + def get_bindings(%{pid: pid}) do + Handler.get_bindings(pid) + end + def set_compressor(%{pid: pid}, compressor) do Handler.set_compressor(pid, compressor) end + defp build_handlers(endpoint, servers, opts) do + Enum.flat_map(servers, fn {_name, server_mod} = server -> + routes = server_mod.__meta__(:routes) + Enum.map(routes, &build_route(&1, endpoint, server, opts)) + end) + end + + defp build_route({:grpc, path}, endpoint, server, opts) do + {path, GRPC.Server.Adapters.Cowboy.Handler, {endpoint, server, path, Enum.into(opts, %{})}} + end + + defp build_route({:http_transcode, {_method, path, match}}, endpoint, server, opts) do + {match, GRPC.Server.Adapters.Cowboy.Handler, {endpoint, server, path, Enum.into(opts, %{})}} + end + defp cowboy_start_args(endpoint, servers, port, opts) do # Custom handler to be able to listen in the same port, more info: # https://github.com/containous/traefik/issues/6211 {adapter_opts, opts} = Keyword.pop(opts, :adapter_opts, []) status_handler = Keyword.get(adapter_opts, :status_handler) + handlers = build_handlers(endpoint, servers, opts) + handlers = if status_handler do - [ - status_handler, - {:_, GRPC.Server.Adapters.Cowboy.Handler, {endpoint, servers, Enum.into(opts, %{})}} - ] + [status_handler | handlers] else - [{:_, GRPC.Server.Adapters.Cowboy.Handler, {endpoint, servers, Enum.into(opts, %{})}}] + handlers end - dispatch = :cowboy_router.compile([{:_, handlers}]) + dispatch = GRPC.Server.Adapters.Cowboy.Router.compile([{:_, handlers}]) + idle_timeout = Keyword.get(opts, :idle_timeout) || :infinity num_acceptors = Keyword.get(opts, :num_acceptors) || @default_num_acceptors max_connections = Keyword.get(opts, :max_connections) || @default_max_connections diff --git a/lib/grpc/server/adapters/cowboy/handler.ex b/lib/grpc/server/adapters/cowboy/handler.ex index e7b7355e..c4189dc4 100644 --- a/lib/grpc/server/adapters/cowboy/handler.ex +++ b/lib/grpc/server/adapters/cowboy/handler.ex @@ -13,14 +13,19 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do @spec init( map(), - state :: {endpoint :: atom(), servers :: %{String.t() => [module()]}, opts :: keyword()} + state :: + {endpoint :: atom(), server :: {String.t(), module()}, route :: String.t(), + opts :: keyword()} ) :: {:cowboy_loop, map(), map()} - def init(req, {endpoint, servers, opts} = state) do - path = :cowboy_req.path(req) - - with {:ok, server} <- find_server(servers, path), - {:ok, codec} <- 
find_codec(req, server), - # can be nil + def init(req, {endpoint, {_name, server}, route, opts} = state) do + http_method = + req + |> :cowboy_req.method() + |> String.downcase() + |> String.to_existing_atom() + + with {:ok, sub_type, content_type} <- find_content_type_subtype(req), + {:ok, codec} <- find_codec(sub_type, content_type, server), {:ok, compressor} <- find_compressor(req, server) do stream = %GRPC.Server.Stream{ server: server, @@ -29,10 +34,12 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do payload: %{pid: self()}, local: opts[:local], codec: codec, - compressor: compressor + http_method: http_method, + compressor: compressor, + http_transcode: transcode?(req) } - pid = spawn_link(__MODULE__, :call_rpc, [server, path, stream]) + pid = spawn_link(__MODULE__, :call_rpc, [server, route, stream]) Process.flag(:trap_exit, true) req = :cowboy_req.set_resp_headers(HTTP2.server_headers(stream), req) @@ -51,40 +58,43 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do {:cowboy_loop, req, %{pid: pid, handling_timer: timer_ref, pending_reader: nil}} else {:error, error} -> + Logger.error(fn -> inspect(error) end) trailers = HTTP2.server_trailers(error.status, error.message) - req = send_error_trailers(req, trailers) + req = send_error_trailers(req, 200, trailers) {:ok, req, state} end end - defp find_server(servers, path) do - case Map.fetch(servers, GRPC.Server.service_name(path)) do - s = {:ok, _} -> - s - - _ -> - {:error, RPCError.exception(status: :unimplemented)} - end - end - - defp find_codec(req, server) do - req_content_type = :cowboy_req.header("content-type", req) - - {:ok, subtype} = extract_subtype(req_content_type) - codec = Enum.find(server.__meta__(:codecs), nil, fn c -> c.name() == subtype end) - - if codec do + defp find_codec(subtype, content_type, server) do + if codec = Enum.find(server.__meta__(:codecs), nil, fn c -> c.name() == subtype end) do {:ok, codec} else - # TODO: Send grpc-accept-encoding header {:error, RPCError.exception( status: :unimplemented, - message: "No codec registered for content-type #{req_content_type}" + message: "No codec registered for content-type #{content_type}" )} end end + defp find_content_type_subtype(req) do + content_type = + case :cowboy_req.header("content-type", req) do + :undefined -> + :cowboy_req.header("accept", req) + + content_type -> + content_type + end + + find_subtype(content_type) + end + + defp find_subtype(content_type) do + {:ok, subtype} = extract_subtype(content_type) + {:ok, subtype, content_type} + end + defp find_compressor(req, server) do encoding = :cowboy_req.header("grpc-encoding", req) @@ -114,8 +124,8 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do sync_call(pid, :read_body) end - def stream_body(pid, data, opts, is_fin) do - send(pid, {:stream_body, data, opts, is_fin}) + def stream_body(pid, data, opts, is_fin, http_transcode \\ false) do + send(pid, {:stream_body, data, opts, is_fin, http_transcode}) end def stream_reply(pid, status, headers) do @@ -150,6 +160,14 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do sync_call(pid, :get_cert) end + def get_qs(pid) do + sync_call(pid, :get_qs) + end + + def get_bindings(pid) do + sync_call(pid, :get_bindings) + end + defp sync_call(pid, key) do ref = make_ref() send(pid, {key, ref, self()}) @@ -225,7 +243,27 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do {:ok, req, state} end - def info({:stream_body, data, opts, is_fin}, req, state) do + def info({:get_qs, ref, pid}, req, state) do + qs = :cowboy_req.qs(req) + send(pid, {ref, qs}) + {:ok, 
req, state}
+  end
+
+  def info({:get_bindings, ref, pid}, req, state) do
+    bindings = :cowboy_req.bindings(req)
+    send(pid, {ref, bindings})
+    {:ok, req, state}
+  end
+
+  # Handle http/json transcoded response
+  def info({:stream_body, data, _opts, is_fin, _http_transcode = true}, req, state) do
+    # TODO Compress
+    req = check_sent_resp(req)
+    :cowboy_req.stream_body(data, is_fin, req)
+    {:ok, req, state}
+  end
+
+  def info({:stream_body, data, opts, is_fin, _}, req, state) do
     # If compressor exists, compress is true by default
     compressor =
       if opts[:compress] == false do
       msg =
         "A unaccepted encoding #{compressor.name()} is set, valid are: #{:cowboy_req.header("grpc-accept-encoding", req)}"

-      req = send_error(req, state, msg)
+      error = RPCError.exception(status: :internal, message: msg)
+      req = send_error(req, error, state, :rpc_error)
+
       {:stop, req, state}
     else
       case GRPC.Message.to_data(data, compressor: compressor, codec: opts[:codec]) do
           {:ok, req, state}

         {:error, msg} ->
-          req = send_error(req, state, msg)
+          error = RPCError.exception(status: :internal, message: msg)
+          req = send_error(req, error, state, :rpc_error)
           {:stop, req, state}
       end
     end
@@ -284,11 +325,10 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do
     {:ok, req, state}
   end

-  def info({:handling_timeout, _}, req, state = %{pid: pid}) do
+  def info({:handling_timeout, _}, req, state) do
     error = %RPCError{status: GRPC.Status.deadline_exceeded(), message: "Deadline expired"}
-    trailers = HTTP2.server_trailers(error.status, error.message)
-    exit_handler(pid, :timeout)
-    req = send_error_trailers(req, trailers)
+    req = send_error(req, error, state, :timeout)
+
     {:stop, req, state}
   end

@@ -310,27 +350,26 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do
   # expected error raised from user to return error immediately
   def info({:EXIT, pid, {%RPCError{} = error, _stacktrace}}, req, state = %{pid: pid}) do
-    trailers = HTTP2.server_trailers(error.status, error.message)
-    exit_handler(pid, :rpc_error)
-    req = send_error_trailers(req, trailers)
+    req = send_error(req, error, state, :rpc_error)
     {:stop, req, state}
   end

   # unknown error raised from rpc
   def info({:EXIT, pid, {:handle_error, _kind}}, req, state = %{pid: pid}) do
     error = %RPCError{status: GRPC.Status.unknown(), message: "Internal Server Error"}
-    trailers = HTTP2.server_trailers(error.status, error.message)
-    exit_handler(pid, :error)
-    req = send_error_trailers(req, trailers)
+    req = send_error(req, error, state, :error)
+
     {:stop, req, state}
   end

   def info({:EXIT, pid, {reason, stacktrace}}, req, state = %{pid: pid}) do
     Logger.error(Exception.format(:error, reason, stacktrace))
+
     error = %RPCError{status: GRPC.Status.unknown(), message: "Internal Server Error"}
-    trailers = HTTP2.server_trailers(error.status, error.message)
-    exit_handler(pid, reason)
-    req = send_error_trailers(req, trailers)
+    req = send_error(req, error, state, reason)
+
     {:stop, req, state}
   end

@@ -372,8 +411,8 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do
     end
   end

-  defp do_call_rpc(server, path, stream) do
-    result = server.__call_rpc__(path, stream)
+  defp do_call_rpc(server, path, %{http_method: http_method} = stream) do
+    result = server.__call_rpc__(path, http_method, stream)

     case result do
       {:ok, stream, response} ->
@@ -414,12 +453,12 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do
     :cowboy_req.stream_reply(200, req)
   end

-  defp send_error_trailers(%{has_sent_resp: _} = req, trailers) do
+  defp send_error_trailers(%{has_sent_resp: _} = req, _, trailers) do
     :cowboy_req.stream_trailers(trailers, req)
   end

-  defp send_error_trailers(req, trailers) do
-    :cowboy_req.reply(200, trailers, req)
+  defp send_error_trailers(req, status, trailers) do
+    :cowboy_req.reply(status, trailers, req)
   end

   def exit_handler(pid, reason) do
@@ -444,6 +483,7 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do
     end
   end

+  defp extract_subtype("application/json"), do: {:ok, "json"}
   defp extract_subtype("application/grpc"), do: {:ok, "proto"}
   defp extract_subtype("application/grpc+"), do: {:ok, "proto"}
   defp extract_subtype("application/grpc;"), do: {:ok, "proto"}
@@ -462,12 +502,25 @@ defmodule GRPC.Server.Adapters.Cowboy.Handler do
     {:ok, "proto"}
   end

-  defp send_error(req, %{pid: pid}, msg) do
-    error = RPCError.exception(status: :internal, message: msg)
+  defp transcode?(%{version: "HTTP/1.1"}), do: true
+
+  defp transcode?(req) do
+    case find_content_type_subtype(req) do
+      {:ok, "json", _} -> true
+      _ -> false
+    end
+  end
+
+  defp send_error(req, error, state, reason) do
     trailers = HTTP2.server_trailers(error.status, error.message)
-    exit_handler(pid, :rpc_error)
-    send_error_trailers(req, trailers)
+    status = if transcode?(req), do: GRPC.Status.http_code(error.status), else: 200
+
+    if pid = Map.get(state, :pid) do
+      exit_handler(pid, reason)
+    end
+
+    send_error_trailers(req, status, trailers)
   end

   # Similar with cowboy's read_body, but we need to receive the message
diff --git a/lib/grpc/server/adapters/cowboy/router.ex b/lib/grpc/server/adapters/cowboy/router.ex
new file mode 100644
index 00000000..7180cd7a
--- /dev/null
+++ b/lib/grpc/server/adapters/cowboy/router.ex
@@ -0,0 +1,145 @@
+defmodule GRPC.Server.Adapters.Cowboy.Router do
+  # Most of the functionality in this module is lifted from :cowboy_router, with the unused parts
+  # removed. Since the template language for Google.Api.HttpRule is quite rich, it cannot be expressed
+  # in terms of the default routing offered by cowboy.
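+  #
+  # For example (an illustrative template; the bindings are sketched from the
+  # matching rules in GRPC.Server.Router):
+  #
+  #     {_, _, match} = GRPC.Server.Router.build_route(:get, "/v1/{name=shelves/*}/books/{book_id}")
+  #     GRPC.Server.Router.match("/v1/shelves/s1/books/b1", match)
+  #     #=> {true, %{name: "shelves/s1", book_id: "b1"}}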
+  # This module is configured to be used as middleware in `src/grpc_stream_h.erl` instead of :cowboy_router.
+  @moduledoc false
+  @behaviour :cowboy_middleware
+
+  alias GRPC.Server.Router
+
+  @dialyzer {:nowarn_function, compile: 1}
+
+  def compile(routes) do
+    for {host, paths} <- routes do
+      [{host_match, _, _}] = :cowboy_router.compile([{host, []}])
+      compiled_paths = compile_paths(paths, [])
+
+      {host_match, [], compiled_paths}
+    end
+  end
+
+  def compile_paths([], acc) do
+    Enum.reverse(acc)
+  end
+
+  def compile_paths([{path, handler, opts} | paths], acc) when is_binary(path) do
+    {_, _, matches} = Router.build_route(path)
+
+    compile_paths(paths, [{matches, [], handler, opts} | acc])
+  end
+
+  def compile_paths([{route, handler, opts} | paths], acc) do
+    compile_paths(paths, [{route, [], handler, opts} | acc])
+  end
+
+  @impl :cowboy_middleware
+  def execute(
+        req = %{host: host, path: path},
+        env = %{dispatch: dispatch}
+      ) do
+    dispatch =
+      case dispatch do
+        {:persistent_term, key} ->
+          :persistent_term.get(key)
+
+        _ ->
+          dispatch
+      end
+
+    case match(dispatch, host, path) do
+      {:ok, handler, handler_opts, bindings, host_info, path_info} ->
+        {:ok, Map.merge(req, %{host_info: host_info, path_info: path_info, bindings: bindings}),
+         Map.merge(env, %{handler: handler, handler_opts: handler_opts})}
+
+      {:error, :notfound, :host} ->
+        {:stop, :cowboy_req.reply(400, req)}
+
+      {:error, :badrequest, :path} ->
+        {:stop, :cowboy_req.reply(400, req)}
+
+      {:error, :notfound, :path} ->
+        {:stop, :cowboy_req.reply(404, req)}
+    end
+  end
+
+  def match([], _, _) do
+    {:error, :notfound, :host}
+  end
+
+  def match([{:_, [], path_matchs} | _Tail], _, path) do
+    match_path(path_matchs, :undefined, path, %{})
+  end
+
+  defp match_path([], _, _, _) do
+    {:error, :notfound, :path}
+  end
+
+  defp match_path([{:_, [], handler, opts} | _Tail], host_info, _, bindings) do
+    {:ok, handler, opts, bindings, host_info, :undefined}
+  end
+
+  defp match_path([{"*", _, handler, opts} | _Tail], host_info, "*", bindings) do
+    {:ok, handler, opts, bindings, host_info, :undefined}
+  end
+
+  defp match_path([_ | tail], host_info, "*", bindings) do
+    match_path(tail, host_info, "*", bindings)
+  end
+
+  defp match_path([{path_match, fields, handler, opts} | tail], host_info, tokens, bindings)
+       when is_list(tokens) do
+    case Router.match(tokens, path_match, bindings) do
+      false ->
+        match_path(tail, host_info, tokens, bindings)
+
+      {true, path_binds} ->
+        case check_constraints(fields, path_binds) do
+          {:ok, path_binds} ->
+            {:ok, handler, opts, path_binds, host_info, :undefined}
+
+          :nomatch ->
+            match_path(tail, host_info, tokens, bindings)
+        end
+    end
+  end
+
+  defp match_path(_Dispatch, _HostInfo, :badrequest, _Bindings) do
+    {:error, :badrequest, :path}
+  end
+
+  defp match_path(dispatch, host_info, path, bindings) do
+    match_path(dispatch, host_info, Router.split_path(path), bindings)
+  end
+
+  defp check_constraints([], bindings) do
+    {:ok, bindings}
+  end
+
+  defp check_constraints([field | tail], bindings) when is_atom(field) do
+    check_constraints(tail, bindings)
+  end
+
+  defp check_constraints([field | tail], bindings) do
+    name = elem(field, 0)
+
+    case bindings do
+      %{^name => value} ->
+        constraints = elem(field, 1)
+
+        case :cowboy_constraints.validate(
+               value,
+               constraints
+             ) do
+          {:ok, value} ->
+            check_constraints(tail, Map.put(bindings, name, value))
+
+          {:error, _} ->
+            :nomatch
+        end
+
+      _ ->
+        check_constraints(tail, bindings)
+    end
+  end
+end
diff --git a/lib/grpc/server/router.ex 
b/lib/grpc/server/router.ex
new file mode 100644
index 00000000..38b00311
--- /dev/null
+++ b/lib/grpc/server/router.ex
@@ -0,0 +1,214 @@
+defmodule GRPC.Server.Router do
+  @moduledoc """
+  Builds and matches routes for gRPC and HTTP/JSON transcoded requests, based on
+  `Google.Api.HttpRule` path templates.
+  """
+  alias __MODULE__.Template
+
+  @type http_method :: :get | :put | :post | :patch | :delete
+  @type route :: {http_method(), String.t(), Template.matchers()}
+
+  @wildcards [:_, :__]
+
+  @spec build_route(binary() | map()) :: route()
+  def build_route(path) when is_binary(path), do: build_route(:post, path)
+  def build_route(%{pattern: {method, path}}), do: build_route(method, path)
+
+  @doc """
+  Builds a `t:route/0` from a URL path or a `t:Google.Api.HttpRule.t/0`.
+
+  The matcher part in the route can be used in `match/3` to match on a URL path or a list of segments.
+
+  ## Examples
+
+      {:get, "/v1/messages/{message_id}", match} = GRPC.Server.Router.build_route(:get, "/v1/messages/{message_id}")
+
+      {:get, path, match} = GRPC.Server.Router.build_route(:get, "/v1/{book.location=shelves/*}/books/{book.name=*}")
+      {true, %{"book.location": "shelves/example-shelf", "book.name": "example-book"}} = GRPC.Server.Router.match("/v1/shelves/example-shelf/books/example-book", match)
+  """
+  @spec build_route(atom(), binary()) :: route()
+  def build_route(method, path) when is_binary(path) do
+    match =
+      path
+      |> Template.tokenize([])
+      |> Template.parse([])
+
+    {method, path, match}
+  end
+
+  @doc """
+  Splits a URL path into segments, removing the leading and trailing slash.
+
+  ## Examples
+
+      ["v1", "messages"] = GRPC.Server.Router.split_path("/v1/messages")
+  """
+  @spec split_path(String.t()) :: iolist()
+  def split_path(bin) do
+    for segment <- String.split(bin, "/"), segment != "", do: segment
+  end
+
+  @doc """
+  Matches a URL path or a list of URL segments against a compiled route matcher. Matched bindings
+  from the segments are extracted into a map. If the same variable name is used in multiple
+  bindings, the values must match, otherwise the route is not considered a match.
+
+  ## Examples
+
+      {_, _, match} = GRPC.Server.Router.build_route(:get, "/v1/{name=messages}")
+      {true, %{name: "messages"}} = GRPC.Server.Router.match("/v1/messages", match)
+      false = GRPC.Server.Router.match("/v1/messages/foobar", match)
+
+      {_, _, match} = GRPC.Server.Router.build_route(:get, "/v1/{name=shelves/*/books/*}")
+      {true, %{name: "shelves/example-shelf/books/book"}} = GRPC.Server.Router.match("/v1/shelves/example-shelf/books/book", match)
+
+      false = GRPC.Server.Router.match("/v1/shelves/example-shelf/something-else/books/book", match)
+
+  """
+  @spec match(String.t() | [String.t()], Template.matchers()) :: {true, map()} | false
+  def match(path, match) do
+    match(path, match, %{})
+  end
+
+  @spec match(String.t() | [String.t()], Template.matchers(), map()) :: {true, map()} | false
+  def match(path, match, bindings) when is_binary(path) do
+    path
+    |> split_path()
+    |> match(match, bindings)
+  end
+
+  # The last matcher is a 'catch all' matcher, so the rest of the segments match.
+  def match(_segments, [{:__, []}], bindings) do
+    {true, bindings}
+  end
+
+  # 'Any' matcher matches a single segment, cont. recursion.
+  def match([_s | segments], [{:_, _} | matchers], bindings) do
+    match(segments, matchers, bindings)
+  end
+
+  # Matching against a 'literal' match, cont. recursion
+  def match([segment | segments], [_literal = segment | matchers], bindings) do
+    match(segments, matchers, bindings)
+  end
+
+  # /v1/{a=*} is the same as /v1/{a}. Matching and binding the segment to `binding`
+  def match([segment | tail], [{binding, [{:_, _}]} | matchers], bindings) do
+    put_binding(bindings, binding, segment, tail, matchers)
+  end
+
+  # /v1/{a=messages} binding a matching literal
+  def match([segment | segments], [{binding, [segment]} | matchers], bindings) do
+    put_binding(bindings, binding, segment, segments, matchers)
+  end
+
+  # /v1/{a=*} /v1/{a=**} there's no more matchers after the wildcard, bind
+  # the rest of the segments to `binding`
+  def match(rest, [{binding, [{any, _}]}], bindings) when any in @wildcards do
+    value = Enum.join(rest, "/")
+
+    match([], [], Map.put(bindings, binding, value))
+  end
+
+  # /v1/{a=messages/*} /v1/{a=messages/**} there's no more matchers after the wildcard, bind
+  # the rest of the segments including the current segment to `binding`
+  def match([segment | _] = segments, [{binding, [segment, {any, _}]}], bindings)
+      when any in @wildcards do
+    value = Enum.join(segments, "/")
+
+    match([], [], Map.put(bindings, binding, value))
+  end
+
+  # /v1/{a=users/*/messages/*}/suffix. There are sub-matches inside the capture,
+  # so the segments are matched against the sub-matches until an end condition
+  # is reached
+  def match(
+        [segment | tail],
+        [{binding, [segment | sub_matches]} | matches],
+        bindings
+      ) do
+    end_condition =
+      case matches do
+        [next | _] -> next
+        [] -> :undefined
+      end
+
+    with {matched_segments, tail} <- match_until(tail, end_condition, sub_matches, []) do
+      value = Enum.join([segment | matched_segments], "/")
+      bindings = Map.put(bindings, binding, value)
+
+      match(tail, matches, bindings)
+    end
+  end
+
+  # /v1/messages/{message_id} simple binding
+  def match([segment | segments], [{binding, []} | matchers], bindings) when is_atom(binding) do
+    put_binding(bindings, binding, segment, segments, matchers)
+  end
+
+  def match([], [], bindings) do
+    {true, bindings}
+  end
+
+  # no match
+  def match(_segments, _matches, _bindings) do
+    false
+  end
+
+  # End recursion, since there's no "outside" matches we should iterate to end of segments
+  defp match_until([], :undefined, [], acc) do
+    {Enum.reverse(acc), []}
+  end
+
+  # End recursion, end condition is a binding with a matching complex start segment
+  defp match_until(
+         [segment | _] = segments,
+         _end_condition = {binding, [segment | _]},
+         [],
+         acc
+       )
+       when is_atom(binding) do
+    {Enum.reverse(acc), segments}
+  end
+
+  # End recursion since the submatch contains a trailing wildcard but we have more matches "outside" this sub-segment
+  defp match_until([segment | _] = segments, _end_condition = segment, [], acc) do
+    {Enum.reverse(acc), segments}
+  end
+
+  # Reached the "end" of this wildcard, so we proceed with the next match
+  defp match_until([_segment | _] = segments, end_condition, [{:__, []}, match | matches], acc) do
+    match_until(segments, end_condition, [match | matches], acc)
+  end
+
+  # Segment is matching the wildcard and has not reached the "end" of the wildcard
+  defp match_until([segment | segments], end_condition, [{:__, []} | _] = matches, acc) do
+    match_until(segments, end_condition, matches, [segment | acc])
+  end
+
+  # Current match is matching segment, add to accumulator and set next match as the current one
+  defp match_until([segment | segments], end_condition, [segment | matches], acc) do
+    match_until(segments, end_condition, matches, [segment | acc])
+  end
+
+  # 'Any' match is matching first segment, add to accumulator and set next match as the current one
+  defp match_until([segment | segments], 
end_condition, [{:_, []} | matches], acc) do + match_until(segments, end_condition, matches, [segment | acc]) + end + + # No match + defp match_until(_segments, _end_condition, _matches, _acc) do + false + end + + defp put_binding(bindings, binding, value, segments, matchers) do + case bindings do + %{^binding => ^value} -> + match(segments, matchers, bindings) + + %{^binding => _} -> + false + + _ -> + match(segments, matchers, Map.put(bindings, binding, value)) + end + end +end diff --git a/lib/grpc/server/router/field_path.ex b/lib/grpc/server/router/field_path.ex new file mode 100644 index 00000000..d82038ef --- /dev/null +++ b/lib/grpc/server/router/field_path.ex @@ -0,0 +1,20 @@ +defmodule GRPC.Server.Router.FieldPath do + @moduledoc false + + @spec decode_pair({binary(), term()}, map()) :: map() + def decode_pair({key, value}, acc) do + parts = :binary.split(key, ".", [:global]) + assign_map(parts, value, acc) + end + + defp assign_map(parts, value, acc) do + {_, acc} = + get_and_update_in(acc, Enum.map(parts, &Access.key(&1, %{})), fn + prev when prev == %{} -> {prev, value} + prev when is_list(prev) -> {prev, [value | prev]} + prev -> {prev, [value, prev]} + end) + + acc + end +end diff --git a/lib/grpc/server/router/query.ex b/lib/grpc/server/router/query.ex new file mode 100644 index 00000000..409ba269 --- /dev/null +++ b/lib/grpc/server/router/query.ex @@ -0,0 +1,50 @@ +defmodule GRPC.Server.Router.Query do + @moduledoc false + # This module is based on https://github.com/elixir-plug/plug/blob/main/lib/plug/conn/query.ex + # Decoding of URL-encoded queries as per the rules outlined in the documentation for [`google.api.HttpRule`](https://cloud.google.com/endpoints/docs/grpc-service-config/reference/rpc/google.api#google.api.HttpRule) + # It provides similar functionality to `URI.decode_query/3` or `Plug.Conn.Query.decode/4` with the following differences: + + # 1. A repeated key is treated as a list of values + # 1. Sub-paths on the form `path.subpath` are decoded as nested maps + # 1. Sub-paths with the same leaf key are decoded as a list + + alias GRPC.Server.Router.FieldPath + + @spec decode(String.t(), map()) :: %{optional(String.t()) => term()} + def decode(query, acc \\ %{}) + + def decode("", acc) do + acc + end + + def decode(query, acc) when is_binary(query) do + parts = :binary.split(query, "&", [:global]) + + Enum.reduce( + Enum.reverse(parts), + acc, + &decode_www_pair(&1, &2) + ) + end + + defp decode_www_pair("", acc) do + acc + end + + defp decode_www_pair(binary, acc) do + current = + case :binary.split(binary, "=") do + [key, value] -> + {decode_www_form(key), decode_www_form(value)} + + [key] -> + {decode_www_form(key), ""} + end + + FieldPath.decode_pair(current, acc) + end + + defp decode_www_form(value) do + URI.decode_www_form(value) + end +end diff --git a/lib/grpc/server/router/template.ex b/lib/grpc/server/router/template.ex new file mode 100644 index 00000000..15cd7459 --- /dev/null +++ b/lib/grpc/server/router/template.ex @@ -0,0 +1,102 @@ +defmodule GRPC.Server.Router.Template do + @moduledoc false + # https://cloud.google.com/endpoints/docs/grpc-service-config/reference/rpc/google.api#google.api.HttpRule + # Template = "/" Segments [ Verb ] ; + # Segments = Segment { "/" Segment } ; + # Segment = "*" | "**" | LITERAL | Variable ; + # Variable = "{" FieldPath [ "=" Segments ] "}" ; + # FieldPath = IDENT { "." 
IDENT } ;
+  # Verb = ":" LITERAL ;
+  @type segment_match :: String.t() | {atom(), [segment_match]}
+  @type matchers :: [segment_match]
+
+  @spec tokenize(binary(), [tuple()]) :: [tuple()]
+  def tokenize(path, tokens \\ [])
+
+  def tokenize(<<>>, tokens) do
+    Enum.reverse(tokens)
+  end
+
+  def tokenize(segments, tokens) do
+    {token, rest} = do_tokenize(segments, <<>>)
+    tokenize(rest, [token | tokens])
+  end
+
+  @terminals [?/, ?{, ?}, ?=, ?*]
+
+  defp do_tokenize(<<h, t::binary>>, <<>>) when h in @terminals do
+    {{List.to_atom([h]), []}, t}
+  end
+
+  defp do_tokenize(<<h, _::binary>> = rest, acc) when h in @terminals do
+    {{:identifier, acc, []}, rest}
+  end
+
+  defp do_tokenize(<<h, t::binary>>, acc)
+       when h in ?a..?z or h in ?A..?Z or h in ?0..?9 or h == ?_ or h == ?. do
+    do_tokenize(t, <<acc::binary, h>>)
+  end
+
+  defp do_tokenize(<<>>, acc) do
+    {{:identifier, acc, []}, <<>>}
+  end
+
+  @spec parse(tokens :: [tuple()], matchers()) :: matchers() | {matchers(), tokens :: [tuple()]}
+  def parse([], matchers) do
+    Enum.reverse(matchers)
+  end
+
+  def parse([{:/, _} | rest], matchers) do
+    parse(rest, matchers)
+  end
+
+  def parse([{:*, _}, {:*, _} | rest], matchers) do
+    parse(rest, [{:__, []} | matchers])
+  end
+
+  def parse([{:*, _} | rest], matchers) do
+    parse(rest, [{:_, []} | matchers])
+  end
+
+  def parse([{:identifier, identifier, _} | rest], matchers) do
+    parse(rest, [identifier | matchers])
+  end
+
+  def parse([{:"{", _} | rest], matchers) do
+    {matchers, rest} = parse_binding(rest, matchers)
+    parse(rest, matchers)
+  end
+
+  def parse([{:"}", _} | _rest] = acc, matchers) do
+    {matchers, acc}
+  end
+
+  defp parse_binding([], matchers) do
+    {matchers, []}
+  end
+
+  defp parse_binding([{:"}", []} | rest], matchers) do
+    {matchers, rest}
+  end
+
+  defp parse_binding(
+         [{:identifier, id, _}, {:=, _} | rest],
+         matchers
+       ) do
+    variable = field_path(id)
+    {assign, rest} = parse(rest, [])
+
+    parse_binding(rest, [{variable, Enum.reverse(assign)} | matchers])
+  end
+
+  defp parse_binding([{:identifier, id, []} | rest], matchers) do
+    variable = field_path(id)
+    parse_binding(rest, [{variable, []} | matchers])
+  end
+
+  defp field_path(identifier) do
+    String.to_existing_atom(identifier)
+  rescue
+    _e ->
+      String.to_atom(identifier)
+  end
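+
+  # For illustration, tokenizing and parsing a template (a sketch of the
+  # intermediate values implied by the grammar above):
+  #
+  #     "/v1/{name=messages/*}"
+  #     |> tokenize()
+  #     |> parse([])
+  #     #=> ["v1", {:name, ["messages", {:_, []}]}]
+end
diff --git a/lib/grpc/server/stream.ex b/lib/grpc/server/stream.ex
index bd349722..1c0df8c9 100644
--- a/lib/grpc/server/stream.ex
+++ b/lib/grpc/server/stream.ex
@@ -34,6 +34,9 @@ defmodule GRPC.Server.Stream do
           # compressor mainly is used in client decompressing, responses compressing should be set by
           # `GRPC.Server.set_compressor`
           compressor: module() | nil,
+          # For http transcoding
+          http_method: GRPC.Server.Router.http_method(),
+          http_transcode: boolean(),
           __interface__: map()
         }

@@ -51,12 +54,44 @@ defmodule GRPC.Server.Stream do
             adapter: nil,
             local: nil,
             compressor: nil,
+            http_method: :post,
+            http_transcode: false,
             __interface__: %{send_reply: &__MODULE__.send_reply/3}

-  def send_reply(%{adapter: adapter, codec: codec} = stream, reply, opts) do
-    # {:ok, data, _size} = reply |> codec.encode() |> GRPC.Message.to_data()
-    data = codec.encode(reply)
-    adapter.send_reply(stream.payload, data, Keyword.put(opts, :codec, codec))
+  def send_reply(
+        %{grpc_type: :server_stream, codec: codec, http_transcode: true, rpc: rpc} = stream,
+        reply,
+        opts
+      ) do
+    rule = GRPC.Service.rpc_options(rpc, :http) || %{value: %{}}
+    response = GRPC.Server.Transcode.map_response_body(rule.value, reply)
+
+    do_send_reply(stream, [codec.encode(response), "\n"], opts)
+  end
+
+  def send_reply(%{codec: codec, http_transcode: true, rpc: rpc} = stream, reply, opts) do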
+    rule = GRPC.Service.rpc_options(rpc, :http) || %{value: %{}}
+    response = GRPC.Server.Transcode.map_response_body(rule.value, reply)
+
+    do_send_reply(stream, codec.encode(response), opts)
+  end
+
+  def send_reply(%{codec: codec} = stream, reply, opts) do
+    do_send_reply(stream, codec.encode(reply), opts)
+  end
+
+  defp do_send_reply(
+         %{adapter: adapter, codec: codec, http_transcode: http_transcode} = stream,
+         data,
+         opts
+       ) do
+    opts =
+      opts
+      |> Keyword.put(:codec, codec)
+      |> Keyword.put(:http_transcode, http_transcode)
+
+    adapter.send_reply(stream.payload, data, opts)
+
+    stream
+  end
 end
diff --git a/lib/grpc/server/transcode.ex b/lib/grpc/server/transcode.ex
new file mode 100644
index 00000000..1d2562c5
--- /dev/null
+++ b/lib/grpc/server/transcode.ex
@@ -0,0 +1,81 @@
+defmodule GRPC.Server.Transcode do
+  @moduledoc false
+  alias GRPC.Server.Router.Query
+  alias GRPC.Server.Router.FieldPath
+
+  @type t :: map()
+
+  # The request mapping follows these rules:
+  #
+  # 1. Fields referred to by the path template are passed via the URL path.
+  # 2. Fields referred to by `HttpRule.body` are passed via the HTTP request body.
+  # 3. All other fields are passed via the URL query parameters, and the parameter
+  #    name is the field path in the request message. A repeated field can be
+  #    represented as multiple query parameters under the same name.
+  #
+  # If `HttpRule.body` is "*", there is no URL query parameter: all fields are
+  # passed via the URL path and the HTTP request body.
+  # If `HttpRule.body` is omitted, there is no HTTP request body: all fields are
+  # passed via the URL path and URL query parameters.
+  @spec map_request(t(), map(), map(), String.t(), module()) ::
+          {:ok, struct()} | {:error, term()}
+  def map_request(
+        %{body: ""},
+        _body_request,
+        path_bindings,
+        query_string,
+        req_mod
+      ) do
+    path_bindings = map_path_bindings(path_bindings)
+    query = Query.decode(query_string)
+    request = Map.merge(path_bindings, query)
+
+    Protobuf.JSON.from_decoded(request, req_mod)
+  end
+
+  def map_request(
+        %{body: "*"} = rule,
+        body_request,
+        path_bindings,
+        _query_string,
+        req_mod
+      ) do
+    path_bindings = map_path_bindings(path_bindings)
+    body_request = map_request_body(rule, body_request)
+    request = Map.merge(path_bindings, body_request)
+
+    Protobuf.JSON.from_decoded(request, req_mod)
+  end
+
+  def map_request(
+        %{} = rule,
+        body_request,
+        path_bindings,
+        query_string,
+        req_mod
+      ) do
+    path_bindings = map_path_bindings(path_bindings)
+    query = Query.decode(query_string)
+    body_request = map_request_body(rule, body_request)
+    request = Enum.reduce([query, body_request], path_bindings, &Map.merge(&2, &1))
+
+    Protobuf.JSON.from_decoded(request, req_mod)
+  end
+
+  defp map_request_body(%{body: "*"}, request_body), do: request_body
+  defp map_request_body(%{body: ""}, request_body), do: request_body
+
+  defp map_request_body(%{body: field}, request_body),
+    do: %{field => request_body}
+
+  @spec map_response_body(t() | map(), map()) :: map()
+  def map_response_body(%{response_body: ""}, response_body), do: response_body
+
+  def map_response_body(%{response_body: field}, response_body) do
+    key = String.to_existing_atom(field)
+    Map.get(response_body, key)
+  end
+
+  @spec map_path_bindings(map()) :: map()
+  def map_path_bindings(bindings) when bindings == %{}, do: bindings
+
+  def map_path_bindings(bindings) do
+    for {k, v} <- bindings, reduce: %{} do
+      acc -> FieldPath.decode_pair({to_string(k), v}, acc)
+    end
+  end
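+
+  # For illustration, mapping a transcoded `GET /v1/greeter/Mary?from=John`
+  # (a sketch; `Helloworld.HelloRequestFrom` is the message from the example app):
+  #
+  #     map_request(%{body: ""}, %{}, %{name: "Mary"}, "from=John", Helloworld.HelloRequestFrom)
+  #     #=> {:ok, %Helloworld.HelloRequestFrom{name: "Mary", from: "John"}}
+end
diff --git 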
a/lib/grpc/service.ex b/lib/grpc/service.ex index 9eaf717c..7162a83e 100644 --- a/lib/grpc/service.ex +++ b/lib/grpc/service.ex @@ -3,7 +3,7 @@ defmodule GRPC.Service do Define gRPC service used by Stub and Server. You should use `Protobuf` to generate code instead of using this module directly. - It imports DSL functions like `rpc/3` and `stream/1` for defining the RPC + It imports DSL functions like `rpc/4` and `stream/1` for defining the RPC functions easily: defmodule Greeter.Service do @@ -11,11 +11,32 @@ defmodule GRPC.Service do rpc :SayHello, HelloRequest, stream(HelloReply) end + + `option (google.api.http)` annotations are supported for gRPC HTTP/JSON transcoding. Once generated, the fourth argument to `rpc/4` contains + the `Google.Api.HttpRule` option. + + defmodule Greeter.Service do + use GRPC.Service, name: "helloworld.Greeter" + + rpc(:SayHello, Helloworld.HelloRequest, Helloworld.HelloReply, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "", + pattern: {:get, "/v1/greeter/{name}"}, + response_body: "", + selector: "" + } + } + }) + end """ defmacro __using__(opts) do quote do - import GRPC.Service, only: [rpc: 3, stream: 1] + import GRPC.Service, only: [rpc: 4, rpc: 3, stream: 1] Module.register_attribute(__MODULE__, :rpc_calls, accumulate: true) @before_compile GRPC.Service @@ -32,9 +53,10 @@ defmodule GRPC.Service do end end - defmacro rpc(name, request, reply) do + defmacro rpc(name, request, reply, options \\ quote(do: %{})) do quote do - @rpc_calls {unquote(name), unquote(wrap_stream(request)), unquote(wrap_stream(reply))} + @rpc_calls {unquote(name), unquote(wrap_stream(request)), unquote(wrap_stream(reply)), + unquote(options)} end end @@ -54,8 +76,11 @@ defmodule GRPC.Service do quote do: {unquote(param), false} end - def grpc_type({_, {_, false}, {_, false}}), do: :unary - def grpc_type({_, {_, true}, {_, false}}), do: :client_stream - def grpc_type({_, {_, false}, {_, true}}), do: :server_stream - def grpc_type({_, {_, true}, {_, true}}), do: :bidirectional_stream + def grpc_type({_, {_, false}, {_, false}, _}), do: :unary + def grpc_type({_, {_, true}, {_, false}, _}), do: :client_stream + def grpc_type({_, {_, false}, {_, true}, _}), do: :server_stream + def grpc_type({_, {_, true}, {_, true}, _}), do: :bidirectional_stream + + def rpc_options({_, _, _, options}), do: options + def rpc_options({_, _, _, options}, type), do: Map.get(options, type) end diff --git a/lib/grpc/status.ex b/lib/grpc/status.ex index aa2651a5..80daf66e 100644 --- a/lib/grpc/status.ex +++ b/lib/grpc/status.ex @@ -186,6 +186,25 @@ defmodule GRPC.Status do def code_name(15), do: "DataLoss" def code_name(16), do: "Unauthenticated" + @spec http_code(t()) :: t() + def http_code(0), do: 200 + def http_code(1), do: 400 + def http_code(2), do: 500 + def http_code(3), do: 400 + def http_code(4), do: 504 + def http_code(5), do: 404 + def http_code(6), do: 409 + def http_code(7), do: 403 + def http_code(8), do: 429 + def http_code(9), do: 412 + def http_code(10), do: 409 + def http_code(11), do: 400 + def http_code(12), do: 501 + def http_code(13), do: 500 + def http_code(14), do: 503 + def http_code(15), do: 500 + def http_code(16), do: 401 + @spec status_message(t()) :: String.t() | nil def status_message(0), do: nil def status_message(1), do: "The operation was cancelled (typically by the caller)"
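Since each `@rpc_calls` entry is now a 4-tuple, the new `rpc_options/1,2` helpers can be used to introspect a compiled service. A minimal sketch, assuming the `Helloworld.Greeter.Service` from the moduledoc example above is compiled:

```elixir
# Each __rpc_calls__ entry is {name, {req, stream?}, {reply, stream?}, options};
# look up SayHello rather than relying on accumulation order.
rpc =
  Enum.find(Helloworld.Greeter.Service.__rpc_calls__(), fn {name, _, _, _} ->
    name == :SayHello
  end)

GRPC.Service.grpc_type(rpc)
# => :unary

# The :http option carries the Google.Api.HttpRule from the proto annotation.
GRPC.Service.rpc_options(rpc, :http)
# => %{type: Google.Api.PbExtension,
#      value: %Google.Api.HttpRule{pattern: {:get, "/v1/greeter/{name}"}, ...}}
```

On the error path, the new `GRPC.Status.http_code/1` gives the HTTP status a transcoded request is answered with; for example, `GRPC.Status.http_code(5)` (NotFound) returns 404.

diff --git a/lib/grpc/stub.ex b/lib/grpc/stub.ex index d671197f..659ceafc 100644 --- a/lib/grpc/stub.ex +++ 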
b/lib/grpc/stub.ex @@ -62,7 +62,8 @@ defmodule GRPC.Stub do service_mod = opts[:service] service_name = service_mod.__meta__(:name) - Enum.each(service_mod.__rpc_calls__, fn {name, {_, req_stream}, {_, res_stream}} = rpc -> + Enum.each(service_mod.__rpc_calls__, fn {name, {_, req_stream}, {_, res_stream}, _options} = + rpc -> func_name = name |> to_string |> Macro.underscore() path = "/#{service_name}/#{name}" grpc_type = GRPC.Service.grpc_type(rpc) @@ -237,7 +238,7 @@ defmodule GRPC.Stub do # with the last elem being a map of headers `%{headers: headers, trailers: trailers}`(unary) or # `%{headers: headers}`(server streaming) def call(_service_mod, rpc, %{channel: channel} = stream, request, opts) do - {_, {req_mod, req_stream}, {res_mod, response_stream}} = rpc + {_, {req_mod, req_stream}, {res_mod, response_stream}, _rpc_options} = rpc stream = %{stream | request_mod: req_mod, response_mod: res_mod} diff --git a/lib/grpc/transport/http2.ex b/lib/grpc/transport/http2.ex index 8831584c..0d47631f 100644 --- a/lib/grpc/transport/http2.ex +++ b/lib/grpc/transport/http2.ex @@ -12,6 +12,11 @@ defmodule GRPC.Transport.HTTP2 do %{"content-type" => "application/grpc-web-#{codec.name()}"} end + # TO-DO: refactor when we add a GRPC.Codec.content_type callback + def server_headers(%{codec: GRPC.Codec.JSON}) do + %{"content-type" => "application/json"} + end + def server_headers(%{codec: codec}) do %{"content-type" => "application/grpc+#{codec.name()}"} end diff --git a/livebooks/telemetry.livemd b/livebooks/telemetry.livemd index eedc982a..deaf0c04 100644 --- a/livebooks/telemetry.livemd +++ b/livebooks/telemetry.livemd @@ -180,7 +180,7 @@ defmodule MetricsSupervisor do end defp is_message(stream) do - stream.grpc_type in [:client_stream, :bidi_stream] + stream.grpc_type in [:client_stream, :bidirectional_stream] end @histogram_buckets_seconds [5.0e-3, 10.0e-3, 25.0e-3, 50.0e-3, 0.1, 0.25, 0.5, 1, 2.5, 5, 10] diff --git a/mix.exs b/mix.exs index bddeaa41..650a1a21 100644 --- a/mix.exs +++ b/mix.exs @@ -13,6 +13,7 @@ defmodule GRPC.Mixfile do start_permanent: Mix.env() == :prod, deps: deps(), package: package(), + aliases: aliases(), description: "The Elixir implementation of gRPC", docs: [ extras: ["README.md"], @@ -30,20 +31,29 @@ defmodule GRPC.Mixfile do ] end - # Configuration for the OTP application - # - # Type "mix help compile.app" for more information def application do [extra_applications: [:logger]] end + def escript do + [main_module: GRPC.Protoc.CLI, name: "protoc-gen-grpc_elixir"] + end + defp deps do [ {:cowboy, "~> 2.10"}, {:gun, "~> 2.0"}, - {:mint, "~> 1.5"}, + {:jason, ">= 0.0.0", optional: true}, {:cowlib, "~> 2.12"}, - {:protobuf, "~> 0.11", only: [:dev, :test]}, + {:protobuf, "~> 0.11"}, + {:protobuf_generate, "~> 0.1.1", only: [:dev, :test]}, + {:googleapis, + github: "googleapis/googleapis", + branch: "master", + app: false, + compile: false, + only: [:dev, :test]}, + {:mint, "~> 1.5"}, {:ex_doc, "~> 0.29", only: :dev}, {:dialyxir, "~> 1.4.0", only: [:dev, :test], runtime: false}, {:ex_parameterized, "~> 1.3.7", only: :test}, @@ -60,6 +70,58 @@ defmodule GRPC.Mixfile do } end + defp aliases do + [ + gen_bootstrap_protos: &gen_bootstrap_protos/1, + gen_test_protos: [&gen_bootstrap_protos/1, &gen_test_protos/1] + ] + end + defp elixirc_paths(:test), do: ["lib", "test/support"] defp elixirc_paths(_), do: ["lib"] + + defp gen_test_protos(_args) do + api_src = Mix.Project.deps_paths().googleapis + transcode_src = "test/support" + + protoc!( + [ + "--include-path=#{api_src}", + 
"--include-path=#{transcode_src}", + "--plugins=ProtobufGenerate.Plugins.GRPCWithOptions" + ], + "./#{transcode_src}", + ["test/support/transcode_messages.proto"] + ) + end + + # https://github.com/elixir-protobuf/protobuf/blob/cdf3acc53f619866b4921b8216d2531da52ceba7/mix.exs#L140 + defp gen_bootstrap_protos(_args) do + proto_src = Mix.Project.deps_paths().googleapis + + protoc!("--include-path=#{proto_src}", "./test/support", [ + "google/protobuf/descriptor.proto", + "google/api/http.proto", + "google/api/annotations.proto" + ]) + end + + defp protoc!(args, elixir_out, files_to_generate) when is_list(args) do + protoc!(Enum.join(args, " "), elixir_out, files_to_generate) + end + + defp protoc!(args, elixir_out, files_to_generate) + when is_binary(args) and is_binary(elixir_out) and is_list(files_to_generate) do + args = + [ + ~s(mix protobuf.generate), + ~s(--output-path="#{elixir_out}"), + args + ] ++ files_to_generate + + case Mix.shell().cmd(Enum.join(args, " ")) do + 0 -> Mix.Task.rerun("format", [Path.join([elixir_out, "**", "*.pb.ex"])]) + other -> Mix.raise("'protoc' exited with non-zero status: #{other}") + end + end end diff --git a/mix.lock b/mix.lock index 83eee06c..ff6e50e0 100644 --- a/mix.lock +++ b/mix.lock @@ -1,19 +1,22 @@ %{ - "cowboy": {:hex, :cowboy, "2.10.0", "ff9ffeff91dae4ae270dd975642997afe2a1179d94b1887863e43f681a203e26", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "3afdccb7183cc6f143cb14d3cf51fa00e53db9ec80cdcd525482f5e99bc41d6b"}, + "cowboy": {:hex, :cowboy, "2.11.0", "356bf784599cf6f2cdc6ad12fdcfb8413c2d35dab58404cf000e1feaed3f5645", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "0fa395437f1b0e104e0e00999f39d2ac5f4082ac5049b67a5b6d56ecc31b1403"}, "cowlib": {:hex, :cowlib, "2.12.1", "a9fa9a625f1d2025fe6b462cb865881329b5caff8f1854d1cbc9f9533f00e1e1", [:make, :rebar3], [], "hexpm", "163b73f6367a7341b33c794c4e88e7dbfe6498ac42dcd69ef44c5bc5507c8db0"}, - "dialyxir": {:hex, :dialyxir, "1.4.2", "764a6e8e7a354f0ba95d58418178d486065ead1f69ad89782817c296d0d746a5", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "516603d8067b2fd585319e4b13d3674ad4f314a5902ba8130cd97dc902ce6bbd"}, - "earmark_parser": {:hex, :earmark_parser, "1.4.32", "fa739a0ecfa34493de19426681b23f6814573faee95dfd4b4aafe15a7b5b32c6", [:mix], [], "hexpm", "b8b0dd77d60373e77a3d7e8afa598f325e49e8663a51bcc2b88ef41838cca755"}, + "dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"}, "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, - "ex_doc": {:hex, :ex_doc, "0.29.4", "6257ecbb20c7396b1fe5accd55b7b0d23f44b6aa18017b415cb4c2b91d997729", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: 
"hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "2c6699a737ae46cb61e4ed012af931b57b699643b24dabe2400a8168414bc4f5"}, + "ex_doc": {:hex, :ex_doc, "0.31.1", "8a2355ac42b1cc7b2379da9e40243f2670143721dd50748bf6c3b1184dae2089", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "3178c3a407c557d8343479e1ff117a96fd31bafe52a039079593fb0524ef61b0"}, "ex_parameterized": {:hex, :ex_parameterized, "1.3.7", "801f85fc4651cb51f11b9835864c6ed8c5e5d79b1253506b5bb5421e8ab2f050", [:mix], [], "hexpm", "1fb0dc4aa9e8c12ae23806d03bcd64a5a0fc9cd3f4c5602ba72561c9b54a625c"}, + "googleapis": {:git, "https://github.com/googleapis/googleapis.git", "75c0ed03df97edf6b9c8191d9b61642863d00b61", [branch: "master"]}, "gun": {:hex, :gun, "2.0.1", "160a9a5394800fcba41bc7e6d421295cf9a7894c2252c0678244948e3336ad73", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}], "hexpm", "a10bc8d6096b9502205022334f719cc9a08d9adcfbfc0dbee9ef31b56274a20b"}, "hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"}, - "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"}, + "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, + "makeup": {:hex, :makeup, "1.1.1", "fa0bc768698053b2b3869fa8a62616501ff9d11a562f3ce39580d60860c3a55e", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5dc62fbdd0de44de194898b6710692490be74baa02d9d108bc29f007783b0b48"}, "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, - "makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"}, - "mint": {:hex, :mint, "1.5.1", "8db5239e56738552d85af398798c80648db0e90f343c8469f6c6d8898944fb6f", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "4a63e1e76a7c3956abd2c72f370a0d0aecddc3976dea5c27eccbecfa5e7d5b1e"}, - "nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"}, 
- "protobuf": {:hex, :protobuf, "0.11.0", "58d5531abadea3f71135e97bd214da53b21adcdb5b1420aee63f4be8173ec927", [:mix], [{:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "30ad9a867a5c5a0616cac9765c4d2c2b7b0030fa81ea6d0c14c2eb5affb6ac52"}, + "makeup_erlang": {:hex, :makeup_erlang, "0.1.4", "29563475afa9b8a2add1b7a9c8fb68d06ca7737648f28398e04461f008b69521", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f4ed47ecda66de70dd817698a703f8816daa91272e7e45812469498614ae8b29"}, + "mint": {:hex, :mint, "1.5.2", "4805e059f96028948870d23d7783613b7e6b0e2fb4e98d720383852a760067fd", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "d77d9e9ce4eb35941907f1d3df38d8f750c357865353e21d335bdcdf6d892a02"}, + "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"}, + "protobuf": {:hex, :protobuf, "0.12.0", "58c0dfea5f929b96b5aa54ec02b7130688f09d2de5ddc521d696eec2a015b223", [:mix], [{:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "75fa6cbf262062073dd51be44dd0ab940500e18386a6c4e87d5819a58964dc45"}, + "protobuf_generate": {:hex, :protobuf_generate, "0.1.1", "f6098b85161dcfd48a4f6f1abee4ee5e057981dfc50aafb1aa4bd5b0529aa89b", [:mix], [{:protobuf, "~> 0.11", [hex: :protobuf, repo: "hexpm", optional: false]}], "hexpm", "93a38c8e2aba2a17e293e9ef1359122741f717103984aa6d1ebdca0efb17ab9d"}, "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, } diff --git a/priv/templates/service.ex.eex b/priv/templates/service.ex.eex new file mode 100644 index 00000000..bbea50b6 --- /dev/null +++ b/priv/templates/service.ex.eex @@ -0,0 +1,24 @@ +defmodule <%= @module %>.Service do + <%= unless @module_doc? do %> + @moduledoc false + <% end %> + use GRPC.Service, name: <%= inspect(@service_name) %>, protoc_gen_elixir_version: "<%= @version %>" + + <%= if @descriptor_fun_body do %> + def descriptor do + # credo:disable-for-next-line + <%= @descriptor_fun_body %> + end + <% end %> + + <%= for {method_name, input, output, options} <- @methods do %> + rpc :<%= method_name %>, <%= input %>, <%= output %>, <%= options %> + <% end %> +end + +defmodule <%= @module %>.Stub do + <%= unless @module_doc? do %> + @moduledoc false + <% end %> + use GRPC.Stub, service: <%= @module %>.Service +end diff --git a/src/grpc_stream_h.erl b/src/grpc_stream_h.erl index 4e2652e7..42e2c052 100644 --- a/src/grpc_stream_h.erl +++ b/src/grpc_stream_h.erl @@ -33,7 +33,7 @@ -> {[{spawn, pid(), timeout()}], #state{}}. 
init(StreamID, Req=#{ref := Ref}, Opts) -> Env = maps:get(env, Opts, #{}), - Middlewares = maps:get(middlewares, Opts, [cowboy_router, cowboy_handler]), + Middlewares = maps:get(middlewares, Opts, ['Elixir.GRPC.Server.Adapters.Cowboy.Router', cowboy_handler]), Shutdown = maps:get(shutdown_timeout, Opts, 5000), Pid = proc_lib:spawn_link(?MODULE, request_process, [Req, Env, Middlewares]), Expect = expect(Req), diff --git a/test/grpc/integration/client_interceptor_test.exs b/test/grpc/integration/client_interceptor_test.exs index c4721445..a699a0dc 100644 --- a/test/grpc/integration/client_interceptor_test.exs +++ b/test/grpc/integration/client_interceptor_test.exs @@ -7,7 +7,7 @@ defmodule GRPC.Integration.ClientInterceptorTest do def say_hello(req, stream) do headers = GRPC.Stream.get_headers(stream) label = headers["x-test-label"] - Helloworld.HelloReply.new(message: "Hello, #{req.name} #{label}") + %Helloworld.HelloReply{message: "Hello, #{req.name} #{label}"} end end @@ -67,7 +67,7 @@ defmodule GRPC.Integration.ClientInterceptorTest do ] ) - req = Helloworld.HelloRequest.new(name: "Elixir") + req = %Helloworld.HelloRequest{name: "Elixir"} {:ok, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) assert reply.message == "Hello, Elixir one two" @@ -111,7 +111,7 @@ defmodule GRPC.Integration.ClientInterceptorTest do ] ) - req = Helloworld.HelloRequest.new(name: "Elixir") + req = %Helloworld.HelloRequest{name: "Elixir"} try do Helloworld.Greeter.Stub.say_hello(channel, req) diff --git a/test/grpc/integration/codec_test.exs b/test/grpc/integration/codec_test.exs index 31fbf967..bdd63f05 100644 --- a/test/grpc/integration/codec_test.exs +++ b/test/grpc/integration/codec_test.exs @@ -23,7 +23,7 @@ defmodule GRPC.Integration.CodecTest do codecs: [GRPC.Codec.Proto, GRPC.Codec.Erlpack, GRPC.Codec.WebText] def say_hello(req, _stream) do - Helloworld.HelloReply.new(message: "Hello, #{req.name}") + %Helloworld.HelloReply{message: "Hello, #{req.name}"} end end @@ -35,7 +35,7 @@ defmodule GRPC.Integration.CodecTest do run_server(HelloServer, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") name = "Mairbek" - req = Helloworld.HelloRequest.new(name: name) + req = %Helloworld.HelloRequest{name: name} for codec <- [GRPC.Codec.Erlpack, GRPC.Codec.WebText, GRPC.Codec.Proto] do {:ok, reply} = HelloStub.say_hello(channel, req, codec: codec) @@ -56,7 +56,7 @@ defmodule GRPC.Integration.CodecTest do run_server(HelloServer, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") name = "Mairbek" - req = Helloworld.HelloRequest.new(name: name) + req = %Helloworld.HelloRequest{name: name} for {expected_content_type, codec} <- [ {"grpc-web-text", GRPC.Codec.WebText}, diff --git a/test/grpc/integration/compressor_test.exs b/test/grpc/integration/compressor_test.exs index 1bbc2cf3..6fb4b4ba 100644 --- a/test/grpc/integration/compressor_test.exs +++ b/test/grpc/integration/compressor_test.exs @@ -8,7 +8,7 @@ defmodule GRPC.Integration.CompressorTest do def say_hello(%{name: name = "only client compress"}, stream) do %{"grpc-encoding" => "gzip"} = GRPC.Stream.get_headers(stream) - Helloworld.HelloReply.new(message: "Hello, #{name}") + %Helloworld.HelloReply{message: "Hello, #{name}"} end def say_hello(%{name: name = "only server compress"}, stream) do @@ -17,13 +17,13 @@ defmodule GRPC.Integration.CompressorTest do end GRPC.Server.set_compressor(stream, GRPC.Compressor.Gzip) - Helloworld.HelloReply.new(message: "Hello, #{name}") + %Helloworld.HelloReply{message: "Hello, #{name}"} end 
def say_hello(%{name: name = "both compress"}, stream) do %{"grpc-encoding" => "gzip"} = GRPC.Stream.get_headers(stream) GRPC.Server.set_compressor(stream, GRPC.Compressor.Gzip) - Helloworld.HelloReply.new(message: "Hello, #{name}") + %Helloworld.HelloReply{message: "Hello, #{name}"} end end @@ -32,7 +32,7 @@ defmodule GRPC.Integration.CompressorTest do service: Helloworld.Greeter.Service def say_hello(%{name: name}, _stream) do - Helloworld.HelloReply.new(message: "Hello, #{name}") + %Helloworld.HelloReply{message: "Hello, #{name}"} end end @@ -45,7 +45,7 @@ defmodule GRPC.Integration.CompressorTest do {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") name = "only client compress" - req = Helloworld.HelloRequest.new(name: name) + req = %Helloworld.HelloRequest{name: name} {:ok, reply, headers} = channel @@ -61,7 +61,7 @@ defmodule GRPC.Integration.CompressorTest do {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") name = "only server compress" - req = Helloworld.HelloRequest.new(name: name) + req = %Helloworld.HelloRequest{name: name} # no accept-encoding header {:ok, reply, headers} = channel |> HelloStub.say_hello(req, return_headers: true) @@ -85,7 +85,7 @@ defmodule GRPC.Integration.CompressorTest do {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") name = "both compress" - req = Helloworld.HelloRequest.new(name: name) + req = %Helloworld.HelloRequest{name: name} {:ok, reply, headers} = channel @@ -101,7 +101,7 @@ defmodule GRPC.Integration.CompressorTest do {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") name = "both compress" - req = Helloworld.HelloRequest.new(name: name) + req = %Helloworld.HelloRequest{name: name} assert {:error, %GRPC.RPCError{message: _, status: 12}} = channel diff --git a/test/grpc/integration/connection_test.exs b/test/grpc/integration/connection_test.exs index cb44464f..53eb9b52 100644 --- a/test/grpc/integration/connection_test.exs +++ b/test/grpc/integration/connection_test.exs @@ -4,7 +4,7 @@ defmodule GRPC.Integration.ConnectionTest do test "reconnection works" do server = FeatureServer {:ok, _, port} = GRPC.Server.start(server, 0) - point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906) + point = %Routeguide.Point{latitude: 409_146_138, longitude: -746_188_906} {:ok, channel} = GRPC.Stub.connect("localhost:#{port}", adapter_opts: [retry_timeout: 10]) assert {:ok, _} = channel |> Routeguide.RouteGuide.Stub.get_feature(point) :ok = GRPC.Server.stop(server) @@ -21,7 +21,7 @@ defmodule GRPC.Integration.ConnectionTest do {:ok, _, _} = GRPC.Server.start(server, 0, ip: {:local, socket_path}) {:ok, channel} = GRPC.Stub.connect(socket_path, adapter_opts: [retry_timeout: 10]) - point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906) + point = %Routeguide.Point{latitude: 409_146_138, longitude: -746_188_906} assert {:ok, _} = channel |> Routeguide.RouteGuide.Stub.get_feature(point) :ok = GRPC.Server.stop(server) end @@ -34,7 +34,7 @@ defmodule GRPC.Integration.ConnectionTest do {:ok, _, port} = GRPC.Server.start(server, 0, cred: cred) try do - point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906) + point = %Routeguide.Point{latitude: 409_146_138, longitude: -746_188_906} {:ok, channel} = GRPC.Stub.connect("localhost:#{port}", cred: cred) assert {:ok, _} = Routeguide.RouteGuide.Stub.get_feature(channel, point) diff --git a/test/grpc/integration/endpoint_test.exs b/test/grpc/integration/endpoint_test.exs index 5f1afb46..a4dd0c2d 100644 --- 
a/test/grpc/integration/endpoint_test.exs +++ b/test/grpc/integration/endpoint_test.exs @@ -6,7 +6,7 @@ defmodule GRPC.Integration.EndpointTest do use GRPC.Server, service: Helloworld.Greeter.Service def say_hello(req, _stream) do - Helloworld.HelloReply.new(message: "Hello, #{req.name}") + %Helloworld.HelloReply{message: "Hello, #{req.name}"} end end @@ -21,7 +21,7 @@ defmodule GRPC.Integration.EndpointTest do def init(_), do: [] def call(_, stream, _next, _) do - {:ok, stream, Helloworld.HelloReply.new(message: "Hello by interceptor")} + {:ok, stream, %Helloworld.HelloReply{message: "Hello by interceptor"}} end end @@ -29,7 +29,7 @@ defmodule GRPC.Integration.EndpointTest do use GRPC.Server, service: Routeguide.RouteGuide.Service def get_feature(point, _stream) do - Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}") + %Routeguide.Feature{location: point, name: "#{point.latitude},#{point.longitude}"} end def list_features(rectangle, stream) do @@ -40,11 +40,11 @@ defmodule GRPC.Integration.EndpointTest do end def record_route(enum, _stream) do - Routeguide.RouteSummary.new(point_count: Enum.count(enum)) + %Routeguide.RouteSummary{point_count: Enum.count(enum)} end defp simple_feature(point) do - Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}") + %Routeguide.Feature{location: point, name: "#{point.latitude},#{point.longitude}"} end end @@ -68,7 +68,7 @@ defmodule GRPC.Integration.EndpointTest do run_endpoint(HelloEndpoint, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "Elixir") + req = %Helloworld.HelloRequest{name: "Elixir"} {:ok, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) assert reply.message == "Hello, Elixir" end) @@ -80,10 +80,10 @@ defmodule GRPC.Integration.EndpointTest do run_endpoint(FeatureEndpoint, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906) - rect = Routeguide.Rectangle.new(hi: point, lo: point) + point = %Routeguide.Point{latitude: 409_146_138, longitude: -746_188_906} + rect = %Routeguide.Rectangle{hi: point, lo: point} {:ok, enum} = channel |> Routeguide.RouteGuide.Stub.list_features(rect) - loc = Routeguide.Feature.new(location: point, name: "409146138,-746188906") + loc = %Routeguide.Feature{location: point, name: "409146138,-746188906"} assert [{:ok, loc}, {:ok, loc}] == Enum.to_list(enum) end) end) =~ "GRPC.Integration.EndpointTest.FeatureServer.list_features" @@ -94,13 +94,13 @@ defmodule GRPC.Integration.EndpointTest do run_endpoint(FeatureEndpoint, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - point0 = Routeguide.Point.new(latitude: 0, longitude: -1) - point1 = Routeguide.Point.new(latitude: 1, longitude: 1) + point0 = %Routeguide.Point{latitude: 0, longitude: -1} + point1 = %Routeguide.Point{latitude: 1, longitude: 1} stream = channel |> Routeguide.RouteGuide.Stub.record_route() GRPC.Stub.send_request(stream, point0) GRPC.Stub.send_request(stream, point1, end_stream: true) reply = GRPC.Stub.recv(stream) - assert {:ok, Routeguide.RouteSummary.new(point_count: 2)} == reply + assert {:ok, %Routeguide.RouteSummary{point_count: 2}} == reply end) end) =~ "GRPC.Integration.EndpointTest.FeatureServer.record_route" end @@ -110,13 +110,13 @@ defmodule GRPC.Integration.EndpointTest do run_endpoint(FeatureAndHelloHaltEndpoint, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - point = 
Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906) + point = %Routeguide.Point{latitude: 409_146_138, longitude: -746_188_906} {:ok, feature} = channel |> Routeguide.RouteGuide.Stub.get_feature(point) assert feature == - Routeguide.Feature.new(location: point, name: "409146138,-746188906") + %Routeguide.Feature{location: point, name: "409146138,-746188906"} - req = Helloworld.HelloRequest.new(name: "Elixir") + req = %Helloworld.HelloRequest{name: "Elixir"} {:ok, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) assert reply.message == "Hello by interceptor" end) diff --git a/test/grpc/integration/namespace_test.exs b/test/grpc/integration/namespace_test.exs index f876178e..d187ce80 100644 --- a/test/grpc/integration/namespace_test.exs +++ b/test/grpc/integration/namespace_test.exs @@ -5,16 +5,16 @@ defmodule GRPC.Integration.NamespaceTest do use GRPC.Server, service: Routeguide.RouteGuide.Service def get_feature(point, _stream) do - Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}") + %Routeguide.Feature{location: point, name: "#{point.latitude},#{point.longitude}"} end end test "it works when outer namespace is same with inner's" do run_server(FeatureServer, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906) + point = %Routeguide.Point{latitude: 409_146_138, longitude: -746_188_906} {:ok, feature} = channel |> Routeguide.RouteGuide.Stub.get_feature(point) - assert feature == Routeguide.Feature.new(location: point, name: "409146138,-746188906") + assert feature == %Routeguide.Feature{location: point, name: "409146138,-746188906"} end) end end diff --git a/test/grpc/integration/server_test.exs b/test/grpc/integration/server_test.exs index 0bcf6aa2..7a6ae8cd 100644 --- a/test/grpc/integration/server_test.exs +++ b/test/grpc/integration/server_test.exs @@ -5,7 +5,75 @@ defmodule GRPC.Integration.ServerTest do use GRPC.Server, service: Routeguide.RouteGuide.Service def get_feature(point, _stream) do - Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}") + %Routeguide.Feature{location: point, name: "#{point.latitude},#{point.longitude}"} + end + end + + defmodule TranscodeErrorServer do + use GRPC.Server, + service: Transcode.Messaging.Service, + http_transcode: true + + def get_message(req, _stream) do + status = String.to_existing_atom(req.name) + + raise GRPC.RPCError, status: status + end + end + + defmodule TranscodeServer do + use GRPC.Server, + service: Transcode.Messaging.Service, + http_transcode: true + + def get_message(msg_request, _stream) do + %Transcode.Message{name: msg_request.name, text: "get_message"} + end + + def stream_messages(msg_request, stream) do + Enum.each(1..5, fn i -> + msg = %Transcode.Message{ + name: msg_request.name, + text: "#{i}" + } + + GRPC.Server.send_reply(stream, msg) + end) + end + + def create_message(msg, _stream) do + msg + end + + def create_message_with_nested_body(msg_request, _stream) do + %Transcode.Message{ + name: msg_request.message.name, + text: "create_message_with_nested_body" + } + end + + def get_message_with_field_path(msg_request, _) do + msg_request.message + end + + def get_message_with_response_body(msg_request, _) do + %Transcode.MessageOut{ + response: %Transcode.Message{ + name: msg_request.name, + text: "get_message_with_response_body" + } + } + end + + def get_message_with_query(msg_request, _stream) do + %Transcode.Message{name: 
msg_request.name, text: "get_message_with_query"} + end + + def get_message_with_subpath_query(msg_request, _stream) do + %Transcode.Message{ + name: msg_request.message.name, + text: "get_message_with_subpath_query" + } end end @@ -19,37 +87,37 @@ defmodule GRPC.Integration.ServerTest do def say_hello(%{name: "delay", duration: duration}, _stream) do Process.sleep(duration) - Helloworld.HelloReply.new(message: "Hello") + %Helloworld.HelloReply{message: "Hello"} end def say_hello(%{name: "large response"}, _stream) do name = String.duplicate("a", round(:math.pow(2, 14))) - Helloworld.HelloReply.new(message: "Hello, #{name}") + %Helloworld.HelloReply{message: "Hello, #{name}"} end def say_hello(%{name: "get peer"}, stream) do {ip, _port} = stream.adapter.get_peer(stream.payload) name = to_string(:inet_parse.ntoa(ip)) - Helloworld.HelloReply.new(message: "Hello, #{name}") + %Helloworld.HelloReply{message: "Hello, #{name}"} end def say_hello(%{name: "get cert"}, stream) do case stream.adapter.get_cert(stream.payload) do :undefined -> - Helloworld.HelloReply.new(message: "Hello, unauthenticated") + %Helloworld.HelloReply{message: "Hello, unauthenticated"} _ -> - Helloworld.HelloReply.new(message: "Hello, authenticated") + %Helloworld.HelloReply{message: "Hello, authenticated"} end end def say_hello(req, _stream) do - Helloworld.HelloReply.new(message: "Hello, #{req.name}") + %Helloworld.HelloReply{message: "Hello, #{req.name}"} end def check_headers(_req, stream) do token = GRPC.Stream.get_headers(stream)["authorization"] - Helloworld.HeaderReply.new(authorization: token) + %Helloworld.HeaderReply{authorization: token} end end @@ -79,7 +147,7 @@ defmodule GRPC.Integration.ServerTest do end defp simple_feature(point) do - Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}") + %Routeguide.Feature{location: point, name: "#{point.latitude},#{point.longitude}"} end end @@ -104,7 +172,7 @@ defmodule GRPC.Integration.ServerTest do end defp simple_feature(point) do - Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}") + %Routeguide.Feature{location: point, name: "#{point.latitude},#{point.longitude}"} end end @@ -117,11 +185,11 @@ defmodule GRPC.Integration.ServerTest do test "multiple servers works" do run_server([FeatureServer, HelloServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906) + point = %Routeguide.Point{latitude: 409_146_138, longitude: -746_188_906} {:ok, feature} = channel |> Routeguide.RouteGuide.Stub.get_feature(point) - assert feature == Routeguide.Feature.new(location: point, name: "409146138,-746188906") + assert feature == %Routeguide.Feature{location: point, name: "409146138,-746188906"} - req = Helloworld.HelloRequest.new(name: "Elixir") + req = %Helloworld.HelloRequest{name: "Elixir"} {:ok, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) assert reply.message == "Hello, Elixir" end) @@ -134,14 +202,15 @@ defmodule GRPC.Integration.ServerTest do [HelloServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "Elixir") + req = %Helloworld.HelloRequest{name: "Elixir"} {:ok, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) assert reply.message == "Hello, Elixir" {:ok, conn_pid} = :gun.open(~c"localhost", port) stream_ref = :gun.get(conn_pid, "/status") + Process.sleep(100) - assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 
200, _headers} + assert_received {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} end, 0, adapter_opts: [status_handler: status_handler] @@ -151,7 +220,7 @@ defmodule GRPC.Integration.ServerTest do test "returns appropriate error for unary requests" do run_server([HelloErrorServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "Elixir") + req = %Helloworld.HelloRequest{name: "Elixir"} {:error, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) assert %GRPC.RPCError{ @@ -164,7 +233,7 @@ defmodule GRPC.Integration.ServerTest do test "return errors for unknown errors" do run_server([HelloErrorServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "unknown error") + req = %Helloworld.HelloRequest{name: "unknown error"} assert {:error, %GRPC.RPCError{message: "Internal Server Error", status: GRPC.Status.unknown()}} == @@ -175,7 +244,7 @@ defmodule GRPC.Integration.ServerTest do test "returns appropriate error for stream requests" do run_server([FeatureErrorServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - rect = Routeguide.Rectangle.new() + rect = %Routeguide.Rectangle{} error = %GRPC.RPCError{message: "Please authenticate", status: 16} assert {:error, ^error} = channel |> Routeguide.RouteGuide.Stub.list_features(rect) end) @@ -184,7 +253,7 @@ defmodule GRPC.Integration.ServerTest do test "return large response(more than MAX_FRAME_SIZE 16384)" do run_server([HelloServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "large response") + req = %Helloworld.HelloRequest{name: "large response"} {:ok, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) name = String.duplicate("a", round(:math.pow(2, 14))) assert "Hello, #{name}" == reply.message @@ -194,7 +263,7 @@ defmodule GRPC.Integration.ServerTest do test "return deadline error for slow server" do run_server([TimeoutServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - rect = Routeguide.Rectangle.new() + rect = %Routeguide.Rectangle{} error = %GRPC.RPCError{message: "Deadline expired", status: 4} assert {:error, ^error} = @@ -205,9 +274,9 @@ defmodule GRPC.Integration.ServerTest do test "return normally for a little slow server" do run_server([SlowServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - low = Routeguide.Point.new(latitude: 400_000_000, longitude: -750_000_000) - high = Routeguide.Point.new(latitude: 420_000_000, longitude: -730_000_000) - rect = Routeguide.Rectangle.new(lo: low, hi: high) + low = %Routeguide.Point{latitude: 400_000_000, longitude: -750_000_000} + high = %Routeguide.Point{latitude: 420_000_000, longitude: -730_000_000} + rect = %Routeguide.Rectangle{lo: low, hi: high} {:ok, stream} = channel |> Routeguide.RouteGuide.Stub.list_features(rect, timeout: 500) Enum.each(stream, fn {:ok, feature} -> @@ -225,8 +294,7 @@ defmodule GRPC.Integration.ServerTest do headers: [{"authorization", token}] ) - {:ok, reply} = - channel |> Helloworld.Greeter.Stub.check_headers(Helloworld.HeaderRequest.new()) + {:ok, reply} = channel |> Helloworld.Greeter.Stub.check_headers(%Helloworld.HeaderRequest{}) assert reply.authorization == token end) @@ -236,7 +304,7 @@ defmodule GRPC.Integration.ServerTest do run_server([HelloServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "get peer") 
+ req = %Helloworld.HelloRequest{name: "get peer"} {:ok, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) assert reply.message == "Hello, 127.0.0.1" end) @@ -246,12 +314,253 @@ defmodule GRPC.Integration.ServerTest do run_server([HelloServer], fn port -> assert {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "get cert") + req = %Helloworld.HelloRequest{name: "get cert"} assert {:ok, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) assert reply.message == "Hello, unauthenticated" end) end + describe "http/json transcode" do + test "grpc method can be called using json when http_transcode == true" do + run_server([TranscodeServer], fn port -> + name = "direct_call" + + {:ok, conn_pid} = :gun.open(~c"localhost", port) + + stream_ref = + :gun.post( + conn_pid, + "/transcode.Messaging/GetMessage", + [ + {"content-type", "application/json"} + ], + Jason.encode!(%{"name" => name}) + ) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + + assert %{"text" => "get_message"} = Jason.decode!(body) + end) + end + + test "should map grpc error codes to http status" do + run_server([TranscodeErrorServer], fn port -> + for {code_name, status} <- [ + {"cancelled", 400}, + {"unknown", 500}, + {"invalid_argument", 400}, + {"deadline_exceeded", 504}, + {"not_found", 404}, + {"already_exists", 409}, + {"permission_denied", 403}, + {"resource_exhausted", 429}, + {"failed_precondition", 412}, + {"aborted", 409}, + {"out_of_range", 400}, + {"unimplemented", 501}, + {"internal", 500}, + {"unavailable", 503}, + {"data_loss", 500}, + {"unauthenticated", 401} + ] do + {:ok, conn_pid} = :gun.open(~c"localhost", port) + + stream_ref = + :gun.get( + conn_pid, + "/v1/messages/#{code_name}", + [ + {"accept", "application/json"} + ] + ) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :fin, ^status, _headers} + end + end) + end + + test "accept: application/json can be used with get requests" do + run_server([TranscodeServer], fn port -> + name = "direct_call" + + {:ok, conn_pid} = :gun.open(~c"localhost", port) + + stream_ref = + :gun.get(conn_pid, "/v1/messages/#{name}", [ + {"accept", "application/json"} + ]) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + + assert %{"text" => "get_message"} = Jason.decode!(body) + end) + end + + test "can transcode path params" do + run_server([TranscodeServer], fn port -> + name = "foo" + + {:ok, conn_pid} = :gun.open(~c"localhost", port) + + stream_ref = + :gun.get(conn_pid, "/v1/messages/#{name}", [ + {"content-type", "application/json"} + ]) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + + assert %{ + "name" => ^name, + "text" => _name + } = Jason.decode!(body) + end) + end + + test "can transcode query params" do + run_server([TranscodeServer], fn port -> + {:ok, conn_pid} = :gun.open(~c"localhost", port) + + stream_ref = + :gun.get(conn_pid, "/v1/messages?name=some_name", [ + {"content-type", "application/json"} + ]) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + + assert %{ + "name" => "some_name", + "text" => "get_message_with_query" + } = Jason.decode!(body) + end) + end + + test "can map request body using 
HttpRule.body" do + run_server([TranscodeServer], fn port -> + {:ok, conn_pid} = :gun.open(~c"localhost", port) + + body = %{"name" => "name"} + + stream_ref = + :gun.post( + conn_pid, + "/v1/messages/nested", + [ + {"content-type", "application/json"} + ], + Jason.encode!(body) + ) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + + assert %{"name" => "name", "text" => "create_message_with_nested_body"} = + Jason.decode!(body) + end) + end + + test "can map response body using HttpRule.response_body" do + run_server([TranscodeServer], fn port -> + {:ok, conn_pid} = :gun.open(~c"localhost", port) + name = "response_body_mapper" + + stream_ref = + :gun.get( + conn_pid, + "/v1/messages/response_body/#{name}", + [ + {"content-type", "application/json"} + ] + ) + + assert_receive {:gun_up, ^conn_pid, :http} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + + assert %{"name" => ^name, "text" => "get_message_with_response_body"} = + Jason.decode!(body) + end) + end + + test "can send streaming responses" do + run_server([TranscodeServer], fn port -> + {:ok, conn_pid} = :gun.open(~c"localhost", port) + + stream_ref = + :gun.get( + conn_pid, + "/v1/messages/stream/stream_test", + [ + {"content-type", "application/json"} + ] + ) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + msgs = String.split(body, "\n", trim: true) + assert length(msgs) == 5 + end) + end + + test "can use field paths in requests" do + run_server([TranscodeServer], fn port -> + {:ok, conn_pid} = :gun.open(~c"localhost", port) + name = "fieldpath" + + stream_ref = + :gun.get( + conn_pid, + "/v1/messages/fieldpath/#{name}", + [ + {"content-type", "application/json"} + ] + ) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + assert %{"name" => ^name} = Jason.decode!(body) + end) + end + + test "service methods can have the same path but different HTTP methods in the http rule option" do + run_server([TranscodeServer], fn port -> + {:ok, conn_pid} = :gun.open(~c"localhost", port) + + payload = %{"name" => "foo", "text" => "bar"} + + stream_ref = + :gun.post( + conn_pid, + "/v1/messages", + [ + {"content-type", "application/json"} + ], + Jason.encode!(payload) + ) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + + assert ^payload = Jason.decode!(body) + + stream_ref = + :gun.get(conn_pid, "/v1/messages?name=another_name", [ + {"content-type", "application/json"} + ]) + + assert_receive {:gun_response, ^conn_pid, ^stream_ref, :nofin, 200, _headers} + assert {:ok, body} = :gun.await_body(conn_pid, stream_ref) + + assert %{ + "name" => "another_name", + "text" => "get_message_with_query" + } = Jason.decode!(body) + end) + end + end + describe "telemetry" do test "sends server start+stop events on success" do server_rpc_prefix = GRPC.Telemetry.server_rpc_prefix() @@ -276,7 +585,7 @@ run_server([HelloServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "delay", duration: 1000) + req = %Helloworld.HelloRequest{name: "delay", duration: 1000} assert {:ok, _} = Helloworld.Greeter.Stub.say_hello(channel, req) end) @@ 
-310,7 +619,8 @@ defmodule GRPC.Integration.ServerTest do assert %{ stream: %GRPC.Client.Stream{ rpc: - {"say_hello", {Helloworld.HelloRequest, false}, {Helloworld.HelloReply, false}} + {"say_hello", {Helloworld.HelloRequest, false}, {Helloworld.HelloReply, false}, + %{}} } } = metadata @@ -321,7 +631,8 @@ defmodule GRPC.Integration.ServerTest do assert %{ stream: %GRPC.Client.Stream{ rpc: - {"say_hello", {Helloworld.HelloRequest, false}, {Helloworld.HelloReply, false}} + {"say_hello", {Helloworld.HelloRequest, false}, {Helloworld.HelloReply, false}, + %{}} } } = metadata @@ -351,7 +662,7 @@ defmodule GRPC.Integration.ServerTest do run_server([HelloServer], fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "raise", duration: 1100) + req = %Helloworld.HelloRequest{name: "raise", duration: 1100} assert {:error, %GRPC.RPCError{status: 2}} = Helloworld.Greeter.Stub.say_hello(channel, req) @@ -398,7 +709,8 @@ defmodule GRPC.Integration.ServerTest do assert %{ stream: %GRPC.Client.Stream{ rpc: - {"say_hello", {Helloworld.HelloRequest, false}, {Helloworld.HelloReply, false}} + {"say_hello", {Helloworld.HelloRequest, false}, {Helloworld.HelloReply, false}, + %{}} } } = metadata @@ -409,7 +721,8 @@ defmodule GRPC.Integration.ServerTest do assert %{ stream: %GRPC.Client.Stream{ rpc: - {"say_hello", {Helloworld.HelloRequest, false}, {Helloworld.HelloReply, false}} + {"say_hello", {Helloworld.HelloRequest, false}, {Helloworld.HelloReply, false}, + %{}} } } = metadata diff --git a/test/grpc/integration/service_test.exs b/test/grpc/integration/service_test.exs index 497bf1cd..ba321e2b 100644 --- a/test/grpc/integration/service_test.exs +++ b/test/grpc/integration/service_test.exs @@ -24,12 +24,12 @@ defmodule GRPC.Integration.ServiceTest do fake_num = length(points) - Routeguide.RouteSummary.new( + %Routeguide.RouteSummary{ point_count: fake_num, feature_count: fake_num, distance: fake_num, elapsed_time: fake_num - ) + } end def route_chat(req_enum, stream) do @@ -56,30 +56,30 @@ defmodule GRPC.Integration.ServiceTest do end defp simple_feature(point) do - Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}") + %Routeguide.Feature{location: point, name: "#{point.latitude},#{point.longitude}"} end end test "unary RPC works" do run_server(FeatureServer, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - point = Routeguide.Point.new(latitude: 409_146_138, longitude: -746_188_906) + point = %Routeguide.Point{latitude: 409_146_138, longitude: -746_188_906} {:ok, feature} = channel |> Routeguide.RouteGuide.Stub.get_feature(point) - assert feature == Routeguide.Feature.new(location: point, name: "409146138,-746188906") + assert feature == %Routeguide.Feature{location: point, name: "409146138,-746188906"} end) end test "server streaming RPC works" do run_server(FeatureServer, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - low = Routeguide.Point.new(latitude: 400_000_000, longitude: -750_000_000) - high = Routeguide.Point.new(latitude: 420_000_000, longitude: -730_000_000) - rect = Routeguide.Rectangle.new(lo: low, hi: high) + low = %Routeguide.Point{latitude: 400_000_000, longitude: -750_000_000} + high = %Routeguide.Point{latitude: 420_000_000, longitude: -730_000_000} + rect = %Routeguide.Rectangle{lo: low, hi: high} {:ok, stream} = channel |> Routeguide.RouteGuide.Stub.list_features(rect) assert Enum.to_list(stream) == [ - {:ok, Routeguide.Feature.new(location: low, name: 
"400000000,-750000000")}, - {:ok, Routeguide.Feature.new(location: high, name: "420000000,-730000000")} + {:ok, %Routeguide.Feature{location: low, name: "400000000,-750000000"}}, + {:ok, %Routeguide.Feature{location: high, name: "420000000,-730000000"}} ] end) end @@ -87,8 +87,8 @@ defmodule GRPC.Integration.ServiceTest do test "client streaming RPC works" do run_server(FeatureServer, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - point1 = Routeguide.Point.new(latitude: 400_000_000, longitude: -750_000_000) - point2 = Routeguide.Point.new(latitude: 420_000_000, longitude: -730_000_000) + point1 = %Routeguide.Point{latitude: 400_000_000, longitude: -750_000_000} + point2 = %Routeguide.Point{latitude: 420_000_000, longitude: -730_000_000} stream = channel |> Routeguide.RouteGuide.Stub.record_route() GRPC.Stub.send_request(stream, point1) GRPC.Stub.send_request(stream, point2, end_stream: true) @@ -105,8 +105,8 @@ defmodule GRPC.Integration.ServiceTest do task = Task.async(fn -> Enum.each(1..6, fn i -> - point = Routeguide.Point.new(latitude: 0, longitude: rem(i, 3) + 1) - note = Routeguide.RouteNote.new(location: point, message: "Message #{i}") + point = %Routeguide.Point{latitude: 0, longitude: rem(i, 3) + 1} + note = %Routeguide.RouteNote{location: point, message: "Message #{i}"} opts = if i == 6, do: [end_stream: true], else: [] GRPC.Stub.send_request(stream, note, opts) end) @@ -137,8 +137,8 @@ defmodule GRPC.Integration.ServiceTest do task = Task.async(fn -> Enum.each(1..5, fn i -> - point = Routeguide.Point.new(latitude: 0, longitude: rem(i, 3) + 1) - note = Routeguide.RouteNote.new(location: point, message: "Message #{i}") + point = %Routeguide.Point{latitude: 0, longitude: rem(i, 3) + 1} + note = %Routeguide.RouteNote{location: point, message: "Message #{i}"} # note that we don't send end of stream yet here GRPC.Stub.send_request(stream, note, []) end) @@ -155,8 +155,8 @@ defmodule GRPC.Integration.ServiceTest do assert "Reply: " <> _msg = note.message if note.message == "Reply: Message 5" do - point = Routeguide.Point.new(latitude: 0, longitude: rem(6, 3) + 1) - note = Routeguide.RouteNote.new(location: point, message: "Message #{6}") + point = %Routeguide.Point{latitude: 0, longitude: rem(6, 3) + 1} + note = %Routeguide.RouteNote{location: point, message: "Message #{6}"} GRPC.Stub.send_request(stream, note, end_stream: true) end diff --git a/test/grpc/integration/stub_test.exs b/test/grpc/integration/stub_test.exs index fd37e954..b4970036 100644 --- a/test/grpc/integration/stub_test.exs +++ b/test/grpc/integration/stub_test.exs @@ -5,7 +5,7 @@ defmodule GRPC.Integration.StubTest do use GRPC.Server, service: Helloworld.Greeter.Service def say_hello(req, _stream) do - Helloworld.HelloReply.new(message: "Hello, #{req.name}") + %Helloworld.HelloReply{message: "Hello, #{req.name}"} end end @@ -62,7 +62,7 @@ defmodule GRPC.Integration.StubTest do GRPC.Stub.connect("localhost:#{port}", interceptors: [GRPC.Client.Interceptors.Logger]) name = String.duplicate("a", round(:math.pow(2, 15))) - req = Helloworld.HelloRequest.new(name: name) + req = %Helloworld.HelloRequest{name: name} {:ok, reply} = channel |> Helloworld.Greeter.Stub.say_hello(req) assert reply.message == "Hello, #{name}" end) @@ -71,7 +71,7 @@ defmodule GRPC.Integration.StubTest do test "returns error when timeout" do run_server(SlowServer, fn port -> {:ok, channel} = GRPC.Stub.connect("localhost:#{port}") - req = Helloworld.HelloRequest.new(name: "Elixir") + req = %Helloworld.HelloRequest{name: "Elixir"} 
assert {:error, %GRPC.RPCError{ diff --git a/test/grpc/server/adapter/cowboy/router_test.exs b/test/grpc/server/adapter/cowboy/router_test.exs new file mode 100644 index 00000000..9b3343a5 --- /dev/null +++ b/test/grpc/server/adapter/cowboy/router_test.exs @@ -0,0 +1,168 @@ +defmodule GRPC.Server.Adapters.Cowboy.RouterTest do + use ExUnit.Case, async: true + alias GRPC.Server.Adapters.Cowboy.Router + + describe "match/3" do + test "with no_host" do + assert {:error, :notfound, :host} = Router.match([], [], []) + end + + test "with no bindings" do + dispatch = make_dispatch("/transcode.Messaging/GetMessage") + + assert {:ok, Handler, [], %{}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/transcode.Messaging/GetMessage") + + assert {:error, :notfound, :path} == Router.match(dispatch, "localhost", "/unknown/path") + end + + test "with simple bindings" do + dispatch = make_dispatch("/v1/{name}") + + assert {:ok, Handler, [], %{name: "messages"}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1/messages") + end + + test "with nested bindings" do + dispatch = make_dispatch("/v1/{message.name}") + + assert {:ok, Handler, [], %{"message.name": "messages"}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1/messages") + end + + test "with multiple bindings" do + dispatch = make_dispatch("/v1/users/{user_id}/messages/{message.message_id}") + + assert {:ok, Handler, [], %{user_id: "1", "message.message_id": "2"}, :undefined, + :undefined} == + Router.match(dispatch, "localhost", "/v1/users/1/messages/2") + end + + test "with multiple sequential bindings" do + dispatch = make_dispatch("/v1/{a}/{b}/{c}") + + assert {:ok, Handler, [], %{a: "a", b: "b", c: "c"}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1/a/b/c") + end + + test "with any " do + dispatch = make_dispatch("/*") + + assert {:ok, Handler, [], %{}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1") + end + + test "with 'any' assignment" do + dispatch = make_dispatch("/{a=*}") + + assert {:ok, Handler, [], %{a: "v1"}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1") + end + + test "with 'catch all' assignment" do + dispatch = make_dispatch("/{a=**}") + + assert {:ok, Handler, [], %{a: "v1/messages"}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1/messages") + end + + test "with 'any' and 'catch all'" do + dispatch = make_dispatch("/*/**") + + assert {:ok, Handler, [], %{}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1/foo/bar/baz") + end + + test "with 'any' and 'catch all' assignment" do + dispatch = make_dispatch("/*/a/{b=c/*}/d/{e=**}") + + assert {:ok, Handler, [], %{b: "c/foo", e: "bar/baz/biz"}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1/a/c/foo/d/bar/baz/biz") + end + + test "with complex binding" do + dispatch = make_dispatch("/v1/{name=messages}") + + assert {:ok, Handler, [], %{name: "messages"}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1/messages") + + assert {:error, :notfound, :path} == + Router.match(dispatch, "localhost", "/v1/should_not_match") + end + + test "with complex binding and 'any'" do + dispatch = make_dispatch("/v1/{name=messages/*}") + + assert {:ok, Handler, [], %{name: "messages/12345"}, :undefined, :undefined} == + Router.match(dispatch, "localhost", "/v1/messages/12345") + + assert {:error, :notfound, :path} == + Router.match(dispatch, "localhost", "/v1/should_not_match/12345") + end + 
+ test "with complex binding, wildcards and trailing route" do + dispatch = make_dispatch("/v1/{name=shelves/*/books/*}") + + assert {:ok, Handler, [], %{name: "shelves/example-shelf/books/example-book"}, :undefined, + :undefined} == + Router.match(dispatch, "localhost", "/v1/shelves/example-shelf/books/example-book") + + assert {:error, :notfound, :path} == + Router.match(dispatch, "localhost", "/v1/shelves/example-shelf/not_books") + end + + test "with complex binding, wildcards and suffix" do + dispatch = make_dispatch("/v1/{name=shelves/*/books/*}/suffix") + + assert {:ok, Handler, [], %{name: "shelves/example-shelf/books/example-book"}, :undefined, + :undefined} == + Router.match( + dispatch, + "localhost", + "/v1/shelves/example-shelf/books/example-book/suffix" + ) + + assert {:error, :notfound, :path} == + Router.match( + dispatch, + "localhost", + "/v1/shelves/example-shelf/books/example-book/another_suffix" + ) + end + + test "with mixed complex binding" do + dispatch = make_dispatch("/v1/{a=users/*}/messages/{message_id}/{c=books/*}") + + assert {:ok, Handler, [], %{a: "users/foobar", message_id: "1", c: "books/barbaz"}, + :undefined, + :undefined} == + Router.match(dispatch, "localhost", "/v1/users/foobar/messages/1/books/barbaz") + + assert {:error, :notfound, :path} == + Router.match(dispatch, "localhost", "/v1/users/1/books/barbaz") + end + + test "with mixed sequential complex binding" do + dispatch = make_dispatch("/v1/{a=users/*}/{b=messages}/{c=books/*}") + + assert {:ok, Handler, [], %{a: "users/foobar", b: "messages", c: "books/barbaz"}, + :undefined, + :undefined} == + Router.match(dispatch, "localhost", "/v1/users/foobar/messages/books/barbaz") + + assert {:error, :notfound, :path} == + Router.match(dispatch, "localhost", "/v1/users/foobar/messages/book/books/barbaz") + end + end + + defp make_dispatch(path) do + {_method, _, match} = GRPC.Server.Router.build_route(path) + + [ + {:_, [], + [ + {match, [], Handler, []} + ]} + ] + end +end diff --git a/test/grpc/server/router/query_test.exs b/test/grpc/server/router/query_test.exs new file mode 100644 index 00000000..ffa4d0ca --- /dev/null +++ b/test/grpc/server/router/query_test.exs @@ -0,0 +1,30 @@ +defmodule GRPC.Server.Router.QueryTest do + use ExUnit.Case, async: true + alias GRPC.Server.Router.Query + + test "`a=b&c=d` should be decoded as a map" do + assert %{"a" => "b", "c" => "d"} == Query.decode("a=b&c=d") + end + + test "`param=A¶m=B` should be decoded as a list" do + assert %{"param" => ["A", "B"]} == Query.decode("param=A¶m=B") + end + + test "`root.a=A&root.b=B` should be decoded as a nested map" do + assert %{"root" => %{"a" => "A", "b" => "B"}} == Query.decode("root.a=A&root.b=B") + end + + test "`root.a=A&root.a=B` should be decoded as a nested map with a list" do + assert %{"root" => %{"a" => ["A", "B"]}} == Query.decode("root.a=A&root.a=B") + end + + test "deeply nested map should be decoded" do + assert %{"root" => %{"a" => %{"b" => %{"c" => %{"d" => "A"}}}, "b" => "B"}, "c" => "C"} == + Query.decode("root.a.b.c.d=A&root.b=B&c=C") + end + + test "pairs without value are decoded as `\"\"`" do + assert %{"param" => "", "a" => "A"} == + Query.decode("param=&a=A") + end +end diff --git a/test/grpc/server/router/template_test.exs b/test/grpc/server/router/template_test.exs new file mode 100644 index 00000000..f19b9cd2 --- /dev/null +++ b/test/grpc/server/router/template_test.exs @@ -0,0 +1,175 @@ +defmodule GRPC.Server.Router.TemplateTest do + use ExUnit.Case, async: true + alias 
GRPC.Server.Router.Template + + describe "tokenize/2" do + test "can tokenize simple paths" do + assert [{:/, []}] = Template.tokenize("/") + + assert [{:/, []}, {:identifier, "v1", []}, {:/, []}, {:identifier, "messages", []}] = + Template.tokenize("/v1/messages") + end + + test "can tokenize simple paths with wildcards" do + assert [ + {:/, []}, + {:identifier, "v1", []}, + {:/, []}, + {:identifier, "messages", []}, + {:/, []}, + {:*, []} + ] == Template.tokenize("/v1/messages/*") + end + + test "can tokenize simple variables" do + assert [ + {:/, []}, + {:identifier, "v1", []}, + {:/, []}, + {:identifier, "messages", []}, + {:/, []}, + {:"{", []}, + {:identifier, "message_id", []}, + {:"}", []} + ] == Template.tokenize("/v1/messages/{message_id}") + end + + test "can tokenize variable assignments in bindings" do + assert [ + {:/, []}, + {:identifier, "v1", []}, + {:/, []}, + {:"{", []}, + {:identifier, "name", []}, + {:=, []}, + {:identifier, "messages", []}, + {:"}", []} + ] == Template.tokenize("/v1/{name=messages}") + end + + test "can tokenize variable sub-paths in bindings" do + assert [ + {:/, []}, + {:identifier, "v1", []}, + {:/, []}, + {:"{", []}, + {:identifier, "name", []}, + {:=, []}, + {:identifier, "messages", []}, + {:/, []}, + {:*, []}, + {:"}", []} + ] == Template.tokenize("/v1/{name=messages/*}") + end + + test "can tokenize field paths in bindings" do + assert [ + {:/, []}, + {:identifier, "v1", []}, + {:/, []}, + {:identifier, "messages", []}, + {:/, []}, + {:"{", []}, + {:identifier, "message_id", []}, + {:"}", []}, + {:/, []}, + {:"{", []}, + {:identifier, "sub.subfield", []}, + {:"}", []} + ] == Template.tokenize("/v1/messages/{message_id}/{sub.subfield}") + end + + test "can tokenize single wildcard" do + assert [{:/, []}, {:*, []}] == Template.tokenize("/*") + end + + test "can tokenize multiple wildcards" do + assert [ + {:/, []}, + {:*, []}, + {:/, []}, + {:*, []}, + {:*, []} + ] == Template.tokenize("/*/**") + end + end + + describe "parse/3" do + test "can parse simple paths" do + assert [] == + "/" + |> Template.tokenize() + |> Template.parse([]) + end + + test "can parse paths with identifiers" do + assert ["v1", "messages"] == + "/v1/messages" + |> Template.tokenize() + |> Template.parse([]) + end + + test "can parse paths with 'any'" do + assert ["v1", "messages", {:_, []}] == + "/v1/messages/*" + |> Template.tokenize() + |> Template.parse([]) + end + + test "can parse paths with 'catch all'" do + assert ["v1", "messages", {:__, []}] == + "/v1/messages/**" + |> Template.tokenize() + |> Template.parse([]) + end + + test "can parse simple bindings with variables" do + assert ["v1", "messages", {:message_id, []}] == + "/v1/messages/{message_id}" + |> Template.tokenize() + |> Template.parse([]) + end + + test "can parse bindings with variable assignment" do + assert ["v1", {:name, ["messages", {:_, []}]}] == + "/v1/{name=messages/*}" + |> Template.tokenize() + |> Template.parse([]) + end + + test "can parse bindings with variable assignment to any" do + assert ["v1", {:name, [{:_, []}]}] == + "/v1/{name=*}" + |> Template.tokenize() + |> Template.parse([]) + end + + test "can parse multiple bindings with variable assignment" do + assert ["v1", {:name, ["messages"]}, {:message_id, []}] == + "/v1/{name=messages}/{message_id}" + |> Template.tokenize() + |> Template.parse([]) + end + + test "can parse bindings with field paths" do + assert ["v1", "messages", {:"sub.subfield", []}] == + "/v1/messages/{sub.subfield}" + |> Template.tokenize() + |> Template.parse([]) 
+    end
+
+    test "supports deeper nested field path" do
+      assert ["v1", "messages", {:"sub.nested.nested.nested", []}] ==
+               "/v1/messages/{sub.nested.nested.nested}"
+               |> Template.tokenize()
+               |> Template.parse([])
+    end
+
+    test "can parse multiple bindings with field paths" do
+      assert ["v1", "messages", {:"first.subfield", []}, {:"second.subfield", []}] ==
+               "/v1/messages/{first.subfield}/{second.subfield}"
+               |> Template.tokenize()
+               |> Template.parse([])
+    end
+  end
+end
diff --git a/test/grpc/server/router_test.exs b/test/grpc/server/router_test.exs
new file mode 100644
index 00000000..f6718309
--- /dev/null
+++ b/test/grpc/server/router_test.exs
@@ -0,0 +1,161 @@
+defmodule GRPC.Server.RouterTest do
+  use ExUnit.Case, async: true
+  alias GRPC.Server.Router
+
+  describe "build_route/1" do
+    test "returns a route with {http_method, path, match} based on the template string" do
+      path = "/v1/messages/{message_id}"
+
+      assert {:get, ^path, match} = Router.build_route(:get, "/v1/messages/{message_id}")
+      assert ["v1", "messages", {:message_id, []}] = match
+    end
+
+    test "defaults to setting method to `:post` if no method was provided" do
+      path = "/transcode.Messaging/GetMessage"
+
+      assert {:post, ^path, match} = Router.build_route(path)
+      assert ["transcode.Messaging", "GetMessage"] = match
+    end
+
+    test "returns a route with {http_method, path, match} based on HttpRule" do
+      path = "/v1/messages/{message_id}"
+      rule = build_simple_rule(:get, "/v1/messages/{message_id}")
+
+      assert {:get, ^path, match} = Router.build_route(rule)
+      assert ["v1", "messages", {:message_id, []}] = match
+    end
+  end
+
+  describe "match/3" do
+    test "with no segments" do
+      assert {true, %{}} = Router.match("/", [])
+    end
+
+    test "with segments and no matchers" do
+      refute Router.match("/foo", [])
+    end
+
+    test "with no bindings" do
+      {_, _, match} = Router.build_route("/transcode.Messaging/GetMessage")
+
+      assert {true, %{}} == Router.match("/transcode.Messaging/GetMessage", match)
+      assert false == Router.match("/transcode.Messaging/GetMessages", match)
+    end
+
+    test "with simple bindings" do
+      {_, _, match} = Router.build_route(:get, "/v1/{name}")
+
+      assert {true, %{name: "messages"}} == Router.match("/v1/messages", match)
+    end
+
+    test "with nested bindings" do
+      {_, _, match} = Router.build_route(:get, "/v1/{message.name}")
+
+      assert {true, %{"message.name": "messages"}} == Router.match("/v1/messages", match)
+    end
+
+    test "with multiple bindings" do
+      {_, _, match} =
+        Router.build_route(:get, "/v1/users/{user_id}/messages/{message.message_id}")
+
+      assert {true, %{user_id: "1", "message.message_id": "2"}} ==
+               Router.match("/v1/users/1/messages/2", match)
+    end
+
+    test "with multiple sequential bindings" do
+      {_, _, match} = Router.build_route("/v1/{a}/{b}/{c}")
+
+      assert {true, %{a: "a", b: "b", c: "c"}} == Router.match("/v1/a/b/c", match)
+    end
+
+    test "with 'any'" do
+      {_, _, match} = Router.build_route("/*")
+
+      assert {true, %{}} == Router.match("/v1", match)
+    end
+
+    test "with 'any' assignment" do
+      {_, _, match} = Router.build_route("/{a=*}")
+
+      assert {true, %{a: "v1"}} == Router.match("/v1", match)
+    end
+
+    test "with 'catch all' assignment" do
+      {_, _, match} = Router.build_route("/{a=**}")
+
+      assert {true, %{a: "v1/messages"}} == Router.match("/v1/messages", match)
+    end
+
+    test "with 'any' and 'catch all'" do
+      {_, _, match} = Router.build_route("/*/**")
+      assert {true, %{}} == Router.match("/v1/foo/bar/baz", match)
+    end
+
+    test "with 'any' and 'catch all' assignment" do
+      {_, _, match} = Router.build_route("/*/a/{b=c/*}/d/{e=**}")
+
+      assert {true, %{b: "c/foo", e: "bar/baz/biz"}} ==
+               Router.match("/v1/a/c/foo/d/bar/baz/biz", match)
+    end
+
+    test "with complex binding" do
+      {_, _, match} = Router.build_route("/v1/{name=messages}")
+
+      assert {true, %{name: "messages"}} == Router.match("/v1/messages", match)
+      refute Router.match("/v1/should_not_match", match)
+    end
+
+    test "with complex binding and 'any'" do
+      {_, _, match} = Router.build_route("/v1/{name=messages/*}")
+
+      assert {true, %{name: "messages/12345"}} == Router.match("/v1/messages/12345", match)
+      refute Router.match("/v1/should_not_match/12345", match)
+    end
+
+    test "with complex binding, wildcards and trailing route" do
+      {_, _, match} = Router.build_route("/v1/{name=shelves/*/books/*}")
+
+      assert {true, %{name: "shelves/example-shelf/books/example-book"}} ==
+               Router.match("/v1/shelves/example-shelf/books/example-book", match)
+
+      refute Router.match("/v1/shelves/example-shelf/not_books", match)
+    end
+
+    test "with complex binding, wildcards and suffix" do
+      {_, _, match} = Router.build_route("/v1/{name=shelves/*/books/*}/suffix")
+
+      assert {true, %{name: "shelves/example-shelf/books/example-book"}} ==
+               Router.match(
+                 "/v1/shelves/example-shelf/books/example-book/suffix",
+                 match
+               )
+
+      refute Router.match(
+               "/v1/shelves/example-shelf/books/example-book/another_suffix",
+               match
+             )
+    end
+
+    test "with mixed complex binding" do
+      {_, _, match} = Router.build_route("/v1/{a=users/*}/messages/{message_id}/{c=books/*}")
+
+      assert {true, %{a: "users/foobar", message_id: "1", c: "books/barbaz"}} ==
+               Router.match("/v1/users/foobar/messages/1/books/barbaz", match)
+
+      assert false == Router.match("/v1/users/1/books/barbaz", match)
+    end
+
+    test "with mixed sequential complex binding" do
+      {_, _, match} = Router.build_route("/v1/{a=users/*}/{b=messages}/{c=books/*}")
+
+      assert {true, %{a: "users/foobar", b: "messages", c: "books/barbaz"}} ==
+               Router.match("/v1/users/foobar/messages/books/barbaz", match)
+
+      refute Router.match("/v1/users/foobar/messages/book/books/barbaz", match)
+    end
+  end
+
+  defp build_simple_rule(method, pattern) do
+    %Google.Api.HttpRule{pattern: {method, pattern}}
+  end
+end
diff --git a/test/grpc/server/transcode_test.exs b/test/grpc/server/transcode_test.exs
new file mode 100644
index 00000000..61995a85
--- /dev/null
+++ b/test/grpc/server/transcode_test.exs
@@ -0,0 +1,63 @@
+defmodule GRPC.TranscodeTest do
+  use ExUnit.Case, async: true
+  alias GRPC.Server.Transcode
+
+  test "map_request/5 with HttpRule.body: '*'" do
+    rule = %Google.Api.HttpRule{body: "*"}
+    request_body = %{"latitude" => 1, "longitude" => 2}
+    bindings = %{}
+    qs = "latitude=10&longitude=20"
+
+    assert {:ok, %Routeguide.Point{latitude: 1, longitude: 2}} =
+             Transcode.map_request(rule, request_body, bindings, qs, Routeguide.Point)
+  end
+
+  test "map_request/5 with empty HttpRule.body" do
+    rule = %Google.Api.HttpRule{body: ""}
+    request_body = %{"latitude" => 10, "longitude" => 20}
+    bindings = %{"latitude" => 5}
+    qs = "longitude=6"
+
+    assert {:ok, %Routeguide.Point{latitude: 5, longitude: 6}} =
+             Transcode.map_request(rule, request_body, bindings, qs, Routeguide.Point)
+  end
+
+  test "map_request/5 with HttpRule.body: 'location'" do
+    rule = %Google.Api.HttpRule{body: "location"}
+    request_body = %{"latitude" => 1, "longitude" => 2}
+    bindings = %{"name" => "test"}
+
+    assert {:ok, %Routeguide.Feature{name: "test", location: point}} =
+             Transcode.map_request(rule, request_body, bindings, "", Routeguide.Feature)
+
+    assert point.latitude == 1
+    assert point.longitude == 2
+  end
+
+  test "map_response_body/2 with empty HttpRule.response_body" do
+    rule = %Google.Api.HttpRule{response_body: ""}
+    response_body = %{message: %{a: "b"}}
+
+    assert response_body == Transcode.map_response_body(rule, response_body)
+  end
+
+  test "map_response_body/2 with HttpRule.response_body: 'message'" do
+    rule = %Google.Api.HttpRule{response_body: "message"}
+    response_body = %{message: %{a: "b"}}
+
+    assert %{a: "b"} == Transcode.map_response_body(rule, response_body)
+  end
+
+  test "map_path_bindings/1 should stringify the keys" do
+    path_binding_atom = %{foo: "bar"}
+    path_binding_string = %{"foo" => "bar"}
+
+    assert %{"foo" => "bar"} == Transcode.map_path_bindings(path_binding_atom)
+    assert %{"foo" => "bar"} == Transcode.map_path_bindings(path_binding_string)
+  end
+
+  test "map_path_bindings/1 with '.' delimited identifiers should create a nested map" do
+    path_binding = %{"foo.bar.baz" => "biz"}
+    assert %{"foo" => %{"bar" => %{"baz" => "biz"}}} == Transcode.map_path_bindings(path_binding)
+  end
+end
diff --git a/test/support/feature_server.ex b/test/support/feature_server.ex
index a3e70c7e..d37225fc 100644
--- a/test/support/feature_server.ex
+++ b/test/support/feature_server.ex
@@ -3,7 +3,7 @@ defmodule FeatureServer do
 
   def get_feature(point, _stream) do
     if point.latitude != 0 do
-      Routeguide.Feature.new(location: point, name: "#{point.latitude},#{point.longitude}")
+      %Routeguide.Feature{location: point, name: "#{point.latitude},#{point.longitude}"}
     else
       {:error, "server error"}
     end
diff --git a/test/support/google/api/annotations.pb.ex b/test/support/google/api/annotations.pb.ex
new file mode 100644
index 00000000..374877d3
--- /dev/null
+++ b/test/support/google/api/annotations.pb.ex
@@ -0,0 +1,8 @@
+defmodule Google.Api.PbExtension do
+  @moduledoc false
+  use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3
+
+  extend Google.Protobuf.MethodOptions, :http, 72_295_728,
+    optional: true,
+    type: Google.Api.HttpRule
+end
diff --git a/test/support/google/api/annotations.proto b/test/support/google/api/annotations.proto
new file mode 100644
index 00000000..efdab3db
--- /dev/null
+++ b/test/support/google/api/annotations.proto
@@ -0,0 +1,31 @@
+// Copyright 2015 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.api;
+
+import "google/api/http.proto";
+import "google/protobuf/descriptor.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations";
+option java_multiple_files = true;
+option java_outer_classname = "AnnotationsProto";
+option java_package = "com.google.api";
+option objc_class_prefix = "GAPI";
+
+extend google.protobuf.MethodOptions {
+  // See `HttpRule`.
+ HttpRule http = 72295728; +} diff --git a/test/support/google/api/http.pb.ex b/test/support/google/api/http.pb.ex new file mode 100644 index 00000000..ebd043a6 --- /dev/null +++ b/test/support/google/api/http.pb.ex @@ -0,0 +1,40 @@ +defmodule Google.Api.Http do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :rules, 1, repeated: true, type: Google.Api.HttpRule + + field :fully_decode_reserved_expansion, 2, + type: :bool, + json_name: "fullyDecodeReservedExpansion" +end + +defmodule Google.Api.HttpRule do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + oneof :pattern, 0 + + field :selector, 1, type: :string + field :get, 2, type: :string, oneof: 0 + field :put, 3, type: :string, oneof: 0 + field :post, 4, type: :string, oneof: 0 + field :delete, 5, type: :string, oneof: 0 + field :patch, 6, type: :string, oneof: 0 + field :custom, 8, type: Google.Api.CustomHttpPattern, oneof: 0 + field :body, 7, type: :string + field :response_body, 12, type: :string, json_name: "responseBody" + + field :additional_bindings, 11, + repeated: true, + type: Google.Api.HttpRule, + json_name: "additionalBindings" +end + +defmodule Google.Api.CustomHttpPattern do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :kind, 1, type: :string + field :path, 2, type: :string +end diff --git a/test/support/google/api/http.proto b/test/support/google/api/http.proto new file mode 100644 index 00000000..113fa936 --- /dev/null +++ b/test/support/google/api/http.proto @@ -0,0 +1,375 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +message Http { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated HttpRule rules = 1; + + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. + // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + bool fully_decode_reserved_expansion = 2; +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. 
Many systems, including [Google
+// APIs](https://github.com/googleapis/googleapis),
+// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC
+// Gateway](https://github.com/grpc-ecosystem/grpc-gateway),
+// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature
+// and use it for large scale production services.
+//
+// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies
+// how different portions of the gRPC request message are mapped to the URL
+// path, URL query parameters, and HTTP request body. It also controls how the
+// gRPC response message is mapped to the HTTP response body. `HttpRule` is
+// typically specified as an `google.api.http` annotation on the gRPC method.
+//
+// Each mapping specifies a URL path template and an HTTP method. The path
+// template may refer to one or more fields in the gRPC request message, as long
+// as each field is a non-repeated field with a primitive (non-message) type.
+// The path template controls how fields of the request message are mapped to
+// the URL path.
+//
+// Example:
+//
+//     service Messaging {
+//       rpc GetMessage(GetMessageRequest) returns (Message) {
+//         option (google.api.http) = {
+//             get: "/v1/{name=messages/*}"
+//         };
+//       }
+//     }
+//     message GetMessageRequest {
+//       string name = 1; // Mapped to URL path.
+//     }
+//     message Message {
+//       string text = 1; // The resource content.
+//     }
+//
+// This enables an HTTP REST to gRPC mapping as below:
+//
+// HTTP | gRPC
+// -----|-----
+// `GET /v1/messages/123456`  | `GetMessage(name: "messages/123456")`
+//
+// Any fields in the request message which are not bound by the path template
+// automatically become HTTP query parameters if there is no HTTP request body.
+// For example:
+//
+//     service Messaging {
+//       rpc GetMessage(GetMessageRequest) returns (Message) {
+//         option (google.api.http) = {
+//             get:"/v1/messages/{message_id}"
+//         };
+//       }
+//     }
+//     message GetMessageRequest {
+//       message SubMessage {
+//         string subfield = 1;
+//       }
+//       string message_id = 1; // Mapped to URL path.
+//       int64 revision = 2;    // Mapped to URL query parameter `revision`.
+//       SubMessage sub = 3;    // Mapped to URL query parameter `sub.subfield`.
+//     }
+//
+// This enables a HTTP JSON to RPC mapping as below:
+//
+// HTTP | gRPC
+// -----|-----
+// `GET /v1/messages/123456?revision=2&sub.subfield=foo` |
+// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield:
+// "foo"))`
+//
+// Note that fields which are mapped to URL query parameters must have a
+// primitive type or a repeated primitive type or a non-repeated message type.
+// In the case of a repeated type, the parameter can be repeated in the URL
+// as `...?param=A&param=B`. In the case of a message type, each field of the
+// message is mapped to a separate parameter, such as
+// `...?foo.a=A&foo.b=B&foo.c=C`.
+//
+// For HTTP methods that allow a request body, the `body` field
+// specifies the mapping.
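The query-parameter mapping just described is what `GRPC.Server.Router.Query.decode/1`, pinned down by `query_test.exs` earlier in this patch, provides on the Elixir side. A short sketch of the behavior those tests establish:

```elixir
alias GRPC.Server.Router.Query

# A repeated parameter accumulates into a list.
Query.decode("param=A&param=B")
# => %{"param" => ["A", "B"]}

# '.'-delimited field paths decode into nested maps, which is how a
# `sub.subfield` query parameter reaches the `sub` sub-message above.
Query.decode("revision=2&sub.subfield=foo")
# => %{"revision" => "2", "sub" => %{"subfield" => "foo"}}
```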
Consider a REST update method on the +// message resource collection: +// +// service Messaging { +// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "message" +// }; +// } +// } +// message UpdateMessageRequest { +// string message_id = 1; // mapped to the URL +// Message message = 2; // mapped to the body +// } +// +// The following HTTP JSON to RPC mapping is enabled, where the +// representation of the JSON in the request body is determined by +// protos JSON encoding: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" message { text: "Hi!" })` +// +// The special name `*` can be used in the body mapping to define that +// every field not bound by the path template should be mapped to the +// request body. This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// +// The following HTTP JSON to RPC mapping is enabled: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all +// fields are passed via URL path and URL query parameters. 
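In this patch, those three classification rules surface through `GRPC.Server.Transcode.map_request/5`, exercised by `transcode_test.exs` above. A sketch of the two extremes, reusing `Routeguide.Point` from the tests (the empty body map in the second call is an assumption; the rule ignores the body entirely when `body` is unset):

```elixir
alias GRPC.Server.Transcode

# body: "*" — the whole request message is taken from the HTTP body,
# so the query string contributes nothing.
rule = %Google.Api.HttpRule{body: "*"}

{:ok, %Routeguide.Point{latitude: 1, longitude: 2}} =
  Transcode.map_request(rule, %{"latitude" => 1, "longitude" => 2}, %{}, "latitude=10", Routeguide.Point)

# body omitted — the body is ignored and fields are filled from the
# path bindings and the query string instead.
rule = %Google.Api.HttpRule{body: ""}

{:ok, %Routeguide.Point{latitude: 5, longitude: 6}} =
  Transcode.map_request(rule, %{}, %{"latitude" => 5}, "longitude=6", Routeguide.Point)
```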
+// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. +// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. +// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. 
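On the Elixir side, this grammar is what `GRPC.Server.Router.Template.tokenize/1` and `Template.parse/2` implement; `template_test.exs` earlier in this patch covers it case by case. Two representative examples taken from those tests:

```elixir
alias GRPC.Server.Router.Template

# A `{var=segments}` variable parses into a binding holding the
# sub-template it has to match.
"/v1/{name=messages/*}"
|> Template.tokenize()
|> Template.parse([])
# => ["v1", {:name, ["messages", {:_, []}]}]

# A bare `*` becomes an anonymous single-segment matcher and `**`
# a trailing catch-all.
"/v1/messages/**"
|> Template.tokenize()
|> Template.parse([])
# => ["v1", "messages", {:__, []}]
```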
+// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. However, some gRPC +// Transcoding implementations may not support this feature. +message HttpRule { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. + oneof pattern { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + string get = 2; + + // Maps to HTTP PUT. Used for replacing a resource. + string put = 3; + + // Maps to HTTP POST. Used for creating a resource or performing an action. + string post = 4; + + // Maps to HTTP DELETE. Used for deleting a resource. + string delete = 5; + + // Maps to HTTP PATCH. Used for updating a resource. + string patch = 6; + + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. + CustomHttpPattern custom = 8; + } + + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + string body = 7; + + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + string response_body = 12; + + // Additional HTTP bindings for the selector. Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + repeated HttpRule additional_bindings = 11; +} + +// A custom pattern is used for defining custom HTTP verb. +message CustomHttpPattern { + // The name of this custom HTTP verb. + string kind = 1; + + // The path matched by this custom verb. + string path = 2; +} diff --git a/test/support/google/protobuf/descriptor.proto b/test/support/google/protobuf/descriptor.proto new file mode 100644 index 00000000..9b11d24a --- /dev/null +++ b/test/support/google/protobuf/descriptor.proto @@ -0,0 +1,1223 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + +syntax = "proto2"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/descriptorpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// The full set of known editions. +enum Edition { + // A placeholder for an unknown edition value. + EDITION_UNKNOWN = 0; + + // Legacy syntax "editions". These pre-date editions, but behave much like + // distinct editions. These can't be used to specify the edition of proto + // files, but feature definitions must supply proto2/proto3 defaults for + // backwards compatibility. + EDITION_PROTO2 = 998; + EDITION_PROTO3 = 999; + + // Editions that have been released. The specific values are arbitrary and + // should not be depended on, but they will always be time-ordered for easy + // comparison. + EDITION_2023 = 1000; + EDITION_2024 = 1001; + + // Placeholder editions for testing feature resolution. These should not be + // used or relyed on outside of tests. 
+ EDITION_1_TEST_ONLY = 1; + EDITION_2_TEST_ONLY = 2; + EDITION_99997_TEST_ONLY = 99997; + EDITION_99998_TEST_ONLY = 99998; + EDITION_99999_TEST_ONLY = 99999; + + // Placeholder for specifying unbounded edition support. This should only + // ever be used by plugins that can expect to never require any changes to + // support a new edition. + EDITION_MAX = 0x7FFFFFFF; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2", "proto3", and "editions". + // + // If `edition` is present, this value must be "editions". + optional string syntax = 12; + + // The edition of the proto file. + optional Edition edition = 14; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + message Declaration { + // The extension number declared within the extension range. + optional int32 number = 1; + + // The fully-qualified name of the extension field. There must be a leading + // dot in front of the full name. + optional string full_name = 2; + + // The fully-qualified type name of the extension field. Unlike + // Metadata.type, Declaration.type must have a leading dot for messages + // and enums. 
+ optional string type = 3; + + // If true, indicates that the number is reserved in the extension range, + // and any extension field with the number will fail to compile. Set this + // when a declared extension field is deleted. + optional bool reserved = 5; + + // If true, indicates that the extension must be defined as repeated. + // Otherwise the extension must be defined as optional. + optional bool repeated = 6; + + reserved 4; // removed is_repeated + } + + // For external users: DO NOT USE. We are in the process of open sourcing + // extension declaration and executing internal cleanups before it can be + // used externally. + repeated Declaration declaration = 2 [retention = RETENTION_SOURCE]; + + // Any features defined in the specific edition. + optional FeatureSet features = 50; + + // The verification state of the extension range. + enum VerificationState { + // All the extensions of the range must be declared. + DECLARATION = 0; + UNVERIFIED = 1; + } + + // The verification state of the range. + // TODO: flip the default to DECLARATION once all empty ranges + // are marked as UNVERIFIED. + optional VerificationState verification = 3 + [default = UNVERIFIED, retention = RETENTION_SOURCE]; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported after google.protobuf. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. In Editions, the group wire format + // can be enabled via the `message_encoding` feature. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REPEATED = 3; + // The required label is only allowed in google.protobuf. In proto3 and Editions + // it's explicitly prohibited. In Editions, the `field_presence` feature + // can be used to get this behavior. + LABEL_REQUIRED = 2; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. 
+ optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; + + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. + // + // When proto3_optional is true, this field must belong to a oneof to signal + // to old proto3 clients that presence is tracked for this field. This oneof + // is known as a "synthetic" oneof, and this field must be its sole member + // (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs + // exist in the descriptor only, and do not generate any API. Synthetic oneofs + // must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. + // This can be useful for round-tripping the .proto file. For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + optional bool proto3_optional = 17; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. 
+message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + // Controls the name of the wrapper Java class generated for the .proto file. + // That class will always contain the .proto file's getDescriptor() method as + // well as any top-level extensions defined in the .proto file. + // If java_multiple_files is disabled, then all the other classes from the + // .proto file will be nested inside the single wrapper outer class. + optional string java_outer_classname = 8; + + // If enabled, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. 
Thus, these types will *not* be nested inside the wrapper class + // named by java_outer_classname. However, the wrapper class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // A proto2 file can set this to true to opt in to UTF-8 checking for Java, + // which will throw an exception if invalid UTF-8 is parsed from the wire or + // assigned to a string field. + // + // TODO: clarify exactly what kinds of field types this option + // applies to, and update these docs accordingly. + // + // Proto3 files already perform these checks. Setting the option explicitly to + // false has no effect: it cannot be used to opt proto3 files out of UTF-8 + // checks. + optional bool java_string_check_utf8 = 27 [default = false]; + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + reserved 42; // removed php_generic_services + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = true]; + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. 
When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + // Any features defined in the specific edition. + optional FeatureSet features = 50; + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + reserved 4, 5, 6; + + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. 
+ // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + // Enable the legacy handling of JSON field name conflicts. This lowercases + // and strips underscored from the fields before comparison in proto3 only. + // The new behavior takes `json_name` into account and applies to proto2 as + // well. + // + // This should only be used as a temporary measure against broken builds due + // to the change in behavior for JSON field name conflicts. + // + // TODO This is legacy behavior we plan to remove once downstream + // teams have had time to migrate. + optional bool deprecated_legacy_json_field_conflicts = 11 [deprecated = true]; + + // Any features defined in the specific edition. + optional FeatureSet features = 12; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is only implemented to support use of + // [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + // type "bytes" in the open source release -- sorry, we'll try to include + // other types in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + // The option [ctype=CORD] may be applied to a non-repeated field of type + // "bytes". It indicates that in C++, the data should be stored in a Cord + // instead of a string. For very large strings, this may reduce memory + // fragmentation. It may also allow better performance when parsing from a + // Cord, or when parsing with aliasing enabled, as the parsed Cord may then + // alias the original buffer. + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. This option is prohibited in + // Editions, but the `repeated_field_encoding` feature can be used to control + // the behavior. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. 
+  optional JSType jstype = 6 [default = JS_NORMAL];
+  enum JSType {
+    // Use the default type.
+    JS_NORMAL = 0;
+
+    // Use JavaScript strings.
+    JS_STRING = 1;
+
+    // Use JavaScript numbers.
+    JS_NUMBER = 2;
+  }
+
+  // Should this field be parsed lazily? Lazy applies only to message-type
+  // fields. It means that when the outer message is initially parsed, the
+  // inner message's contents will not be parsed but instead stored in encoded
+  // form. The inner message will actually be parsed when it is first accessed.
+  //
+  // This is only a hint. Implementations are free to choose whether to use
+  // eager or lazy parsing regardless of the value of this option. However,
+  // setting this option true suggests that the protocol author believes that
+  // using lazy parsing on this field is worth the additional bookkeeping
+  // overhead typically needed to implement it.
+  //
+  // This option does not affect the public interface of any generated code;
+  // all method signatures remain the same. Furthermore, thread-safety of the
+  // interface is not affected by this option; const methods remain safe to
+  // call from multiple threads concurrently, while non-const methods continue
+  // to require exclusive access.
+  //
+  // Note that lazy message fields are still eagerly verified to check for
+  // ill-formed wire format or missing required fields. Calling IsInitialized()
+  // on the outer message would fail if the inner message has missing required
+  // fields. Failed verification would result in parsing failure (except when
+  // uninitialized messages are acceptable).
+  optional bool lazy = 5 [default = false];
+
+  // unverified_lazy does no correctness checks on the byte stream. This should
+  // only be used where lazy with verification is prohibitive for performance
+  // reasons.
+  optional bool unverified_lazy = 15 [default = false];
+
+  // Is this field deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for accessors, or it will be completely ignored; at the very least, this
+  // is a formalization for deprecating fields.
+  optional bool deprecated = 3 [default = false];
+
+  // For Google-internal migration only. Do not use.
+  optional bool weak = 10 [default = false];
+
+  // Indicate that the field value should not be printed out when using debug
+  // formats, e.g. when the field contains sensitive credentials.
+  optional bool debug_redact = 16 [default = false];
+
+  // If set to RETENTION_SOURCE, the option will be omitted from the binary.
+  // Note: as of January 2023, support for this is in progress and does not yet
+  // have an effect (b/264593489).
+  enum OptionRetention {
+    RETENTION_UNKNOWN = 0;
+    RETENTION_RUNTIME = 1;
+    RETENTION_SOURCE = 2;
+  }
+
+  optional OptionRetention retention = 17;
+
+  // This indicates the types of entities that the field may apply to when used
+  // as an option. If it is unset, then the field may be freely used as an
+  // option on any kind of entity. Note: as of January 2023, support for this is
+  // in progress and does not yet have an effect (b/264593489).
+  enum OptionTargetType {
+    TARGET_TYPE_UNKNOWN = 0;
+    TARGET_TYPE_FILE = 1;
+    TARGET_TYPE_EXTENSION_RANGE = 2;
+    TARGET_TYPE_MESSAGE = 3;
+    TARGET_TYPE_FIELD = 4;
+    TARGET_TYPE_ONEOF = 5;
+    TARGET_TYPE_ENUM = 6;
+    TARGET_TYPE_ENUM_ENTRY = 7;
+    TARGET_TYPE_SERVICE = 8;
+    TARGET_TYPE_METHOD = 9;
+  }
+
+  repeated OptionTargetType targets = 19;
+
+  message EditionDefault {
+    optional Edition edition = 3;
+    optional string value = 2;  // Textproto value.
+  }
+  repeated EditionDefault edition_defaults = 20;
+
+  // Any features defined in the specific edition.
+  optional FeatureSet features = 21;
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+
+  reserved 4;  // removed jtype
+  reserved 18;  // reserve target, target_obsolete_do_not_use
+}
+
+message OneofOptions {
+  // Any features defined in the specific edition.
+  optional FeatureSet features = 1;
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message EnumOptions {
+
+  // Set this option to true to allow mapping different tag names to the same
+  // value.
+  optional bool allow_alias = 2;
+
+  // Is this enum deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the enum, or it will be completely ignored; at the very least, this
+  // is a formalization for deprecating enums.
+  optional bool deprecated = 3 [default = false];
+
+  reserved 5;  // javanano_as_lite
+
+  // Enable the legacy handling of JSON field name conflicts. This lowercases
+  // and strips underscores from the fields before comparison in proto3 only.
+  // The new behavior takes `json_name` into account and applies to proto2 as
+  // well.
+  // TODO Remove this legacy behavior once downstream teams have
+  // had time to migrate.
+  optional bool deprecated_legacy_json_field_conflicts = 6 [deprecated = true];
+
+  // Any features defined in the specific edition.
+  optional FeatureSet features = 7;
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message EnumValueOptions {
+  // Is this enum value deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the enum value, or it will be completely ignored; at the very least,
+  // this is a formalization for deprecating enum values.
+  optional bool deprecated = 1 [default = false];
+
+  // Any features defined in the specific edition.
+  optional FeatureSet features = 2;
+
+  // Indicate that fields annotated with this enum value should not be printed
+  // out when using debug formats, e.g. when the field contains sensitive
+  // credentials.
+  optional bool debug_redact = 3 [default = false];
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message ServiceOptions {
+
+  // Any features defined in the specific edition.
+  optional FeatureSet features = 34;
+
+  // Note: Field numbers 1 through 32 are reserved for Google's internal RPC
+  // framework. We apologize for hoarding these numbers to ourselves, but
+  // we were already using them long before we decided to release Protocol
+  // Buffers.
+
+  // Is this service deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the service, or it will be completely ignored; at the very least,
+  // this is a formalization for deprecating services.
+  optional bool deprecated = 33 [default = false];
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message MethodOptions {
+
+  // Note: Field numbers 1 through 32 are reserved for Google's internal RPC
+  // framework. We apologize for hoarding these numbers to ourselves, but
+  // we were already using them long before we decided to release Protocol
+  // Buffers.
+
+  // Is this method deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the method, or it will be completely ignored; at the very least,
+  // this is a formalization for deprecating methods.
+  optional bool deprecated = 33 [default = false];
+
+  // Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+  // or neither? HTTP-based RPC implementations may choose the GET verb for
+  // safe methods, and the PUT verb for idempotent methods, instead of the
+  // default POST.
+  enum IdempotencyLevel {
+    IDEMPOTENCY_UNKNOWN = 0;
+    NO_SIDE_EFFECTS = 1;  // implies idempotent
+    IDEMPOTENT = 2;       // idempotent, but may have side effects
+  }
+  optional IdempotencyLevel idempotency_level = 34
+      [default = IDEMPOTENCY_UNKNOWN];
+
+  // Any features defined in the specific edition.
+  optional FeatureSet features = 35;
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+// A message representing an option the parser does not recognize. This only
+// appears in options protos created by the compiler::Parser class.
+// DescriptorPool resolves these when building Descriptor objects. Therefore,
+// options protos in descriptor objects (e.g. returned by Descriptor::options(),
+// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
+// in them.
+message UninterpretedOption {
+  // The name of the uninterpreted option. Each string represents a segment in
+  // a dot-separated name. is_extension is true iff a segment represents an
+  // extension (denoted with parentheses in options specs in .proto files).
+  // E.g., { ["foo", false], ["bar.baz", true], ["moo", false] } represents
+  // "foo.(bar.baz).moo".
+  message NamePart {
+    required string name_part = 1;
+    required bool is_extension = 2;
+  }
+  repeated NamePart name = 2;
+
+  // The value of the uninterpreted option, in whatever type the tokenizer
+  // identified it as during parsing. Exactly one of these should be set.
+  optional string identifier_value = 3;
+  optional uint64 positive_int_value = 4;
+  optional int64 negative_int_value = 5;
+  optional double double_value = 6;
+  optional bytes string_value = 7;
+  optional string aggregate_value = 8;
+}
+
+// ===================================================================
+// Features
+
+// TODO Enums in C++ gencode (and potentially other languages) are
+// not well scoped. This means that each of the feature enums below can clash
+// with each other. The short names we've chosen maximize call-site
+// readability, but leave us very open to this scenario. A future feature will
+// be designed and implemented to handle this, hopefully before we ever hit a
+// conflict here.
+message FeatureSet { + enum FieldPresence { + FIELD_PRESENCE_UNKNOWN = 0; + EXPLICIT = 1; + IMPLICIT = 2; + LEGACY_REQUIRED = 3; + } + optional FieldPresence field_presence = 1 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "EXPLICIT" }, + edition_defaults = { edition: EDITION_PROTO3, value: "IMPLICIT" }, + edition_defaults = { edition: EDITION_2023, value: "EXPLICIT" } + ]; + + enum EnumType { + ENUM_TYPE_UNKNOWN = 0; + OPEN = 1; + CLOSED = 2; + } + optional EnumType enum_type = 2 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_ENUM, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "CLOSED" }, + edition_defaults = { edition: EDITION_PROTO3, value: "OPEN" } + ]; + + enum RepeatedFieldEncoding { + REPEATED_FIELD_ENCODING_UNKNOWN = 0; + PACKED = 1; + EXPANDED = 2; + } + optional RepeatedFieldEncoding repeated_field_encoding = 3 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "EXPANDED" }, + edition_defaults = { edition: EDITION_PROTO3, value: "PACKED" } + ]; + + enum Utf8Validation { + UTF8_VALIDATION_UNKNOWN = 0; + VERIFY = 2; + NONE = 3; + } + optional Utf8Validation utf8_validation = 4 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "NONE" }, + edition_defaults = { edition: EDITION_PROTO3, value: "VERIFY" } + ]; + + enum MessageEncoding { + MESSAGE_ENCODING_UNKNOWN = 0; + LENGTH_PREFIXED = 1; + DELIMITED = 2; + } + optional MessageEncoding message_encoding = 5 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "LENGTH_PREFIXED" } + ]; + + enum JsonFormat { + JSON_FORMAT_UNKNOWN = 0; + ALLOW = 1; + LEGACY_BEST_EFFORT = 2; + } + optional JsonFormat json_format = 6 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_MESSAGE, + targets = TARGET_TYPE_ENUM, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "LEGACY_BEST_EFFORT" }, + edition_defaults = { edition: EDITION_PROTO3, value: "ALLOW" } + ]; + + reserved 999; + + extensions 1000; // for Protobuf C++ + extensions 1001; // for Protobuf Java + extensions 1002; // for Protobuf Go + + extensions 9995 to 9999; // For internal testing + extensions 10000; // for https://github.com/bufbuild/protobuf-es +} + +// A compiled specification for the defaults of a set of features. These +// messages are generated from FeatureSet extensions and can be used to seed +// feature resolution. The resolution with this object becomes a simple search +// for the closest matching edition, followed by proto merges. +message FeatureSetDefaults { + // A map from every known edition with a unique set of defaults to its + // defaults. Not all editions may be contained here. For a given edition, + // the defaults at the closest matching edition ordered at or before it should + // be used. This field must be in strict ascending order by edition. + message FeatureSetEditionDefault { + optional Edition edition = 3; + optional FeatureSet features = 2; + } + repeated FeatureSetEditionDefault defaults = 1; + + // The minimum supported edition (inclusive) when this was constructed. + // Editions before this will not have defaults. 
+ optional Edition minimum_edition = 4; + + // The maximum known edition (inclusive) when this was constructed. Editions + // after this will not have reliable defaults. + optional Edition maximum_edition = 5; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition appears. + // For example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. 
If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to moo. + // // + // // Another line attached to moo. + // optional double moo = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to moo or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified object. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). 
+ optional int32 end = 4; + + // Represents the identified object's effect on the element in the original + // .proto file. + enum Semantic { + // There is no effect or the effect is indescribable. + NONE = 0; + // The element is set or otherwise mutated. + SET = 1; + // An alias to the element is returned. + ALIAS = 2; + } + optional Semantic semantic = 5; + } +} \ No newline at end of file diff --git a/test/support/proto/helloworld.pb.ex b/test/support/proto/helloworld.pb.ex index 02e5b1d9..a18a68fc 100644 --- a/test/support/proto/helloworld.pb.ex +++ b/test/support/proto/helloworld.pb.ex @@ -1,33 +1,33 @@ defmodule Helloworld.HelloRequest do @moduledoc false - use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + use Protobuf, protoc_gen_elixir_version: "0.10.0", syntax: :proto3 field :name, 1, type: :string - field :duration, 2, proto3_optional: true, type: :int32 + field :duration, 2, type: :int32 end defmodule Helloworld.HelloReply do @moduledoc false - use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + use Protobuf, protoc_gen_elixir_version: "0.10.0", syntax: :proto3 field :message, 1, type: :string end defmodule Helloworld.HeaderRequest do @moduledoc false - use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + use Protobuf, protoc_gen_elixir_version: "0.10.0", syntax: :proto3 end defmodule Helloworld.HeaderReply do @moduledoc false - use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + use Protobuf, protoc_gen_elixir_version: "0.10.0", syntax: :proto3 field :authorization, 1, type: :string end defmodule Helloworld.Greeter.Service do @moduledoc false - use GRPC.Service, name: "helloworld.Greeter", protoc_gen_elixir_version: "0.11.0" + use GRPC.Service, name: "helloworld.Greeter", protoc_gen_elixir_version: "0.10.0" rpc :SayHello, Helloworld.HelloRequest, Helloworld.HelloReply diff --git a/test/support/route_guide_transcode.pb.ex b/test/support/route_guide_transcode.pb.ex new file mode 100644 index 00000000..1079307f --- /dev/null +++ b/test/support/route_guide_transcode.pb.ex @@ -0,0 +1,46 @@ +defmodule RouteguideTranscode.Point do + @moduledoc false + + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :latitude, 1, type: :int32 + field :longitude, 2, type: :int32 +end + +defmodule RouteguideTranscode.Rectangle do + @moduledoc false + + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :lo, 1, type: RouteguideTranscode.Point + field :hi, 2, type: RouteguideTranscode.Point +end + +defmodule RouteguideTranscode.Feature do + @moduledoc false + + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :name, 1, type: :string + field :location, 2, type: RouteguideTranscode.Point +end + +defmodule RouteguideTranscode.RouteNote do + @moduledoc false + + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :location, 1, type: RouteguideTranscode.Point + field :message, 2, type: :string +end + +defmodule RouteguideTranscode.RouteSummary do + @moduledoc false + + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :point_count, 1, type: :int32, json_name: "pointCount" + field :feature_count, 2, type: :int32, json_name: "featureCount" + field :distance, 3, type: :int32 + field :elapsed_time, 4, type: :int32, json_name: "elapsedTime" +end diff --git a/test/support/transcode_messages.pb.ex b/test/support/transcode_messages.pb.ex new file mode 100644 index 00000000..bfc042d5 --- /dev/null +++ 
b/test/support/transcode_messages.pb.ex @@ -0,0 +1,159 @@ +defmodule Transcode.MessageOut do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :response, 1, type: Transcode.Message +end + +defmodule Transcode.GetMessageRequest do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :name, 1, type: :string +end + +defmodule Transcode.Message do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :name, 1, type: :string + field :text, 2, type: :string +end + +defmodule Transcode.NestedMessageRequest do + @moduledoc false + use Protobuf, protoc_gen_elixir_version: "0.11.0", syntax: :proto3 + + field :message, 1, type: Transcode.GetMessageRequest +end + +defmodule Transcode.Messaging.Service do + @moduledoc false + use GRPC.Service, name: "transcode.Messaging", protoc_gen_elixir_version: "0.11.0" + + rpc(:GetMessage, Transcode.GetMessageRequest, Transcode.Message, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "", + pattern: {:get, "/v1/messages/{name}"}, + response_body: "", + selector: "" + } + } + }) + + rpc(:StreamMessages, Transcode.GetMessageRequest, stream(Transcode.Message), %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "", + pattern: {:get, "/v1/messages/stream/{name}"}, + response_body: "", + selector: "" + } + } + }) + + rpc(:GetMessageWithSubPath, Transcode.GetMessageRequest, Transcode.Message, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "", + pattern: {:get, "/v1/{name=}"}, + response_body: "", + selector: "" + } + } + }) + + rpc(:GetMessageWithQuery, Transcode.GetMessageRequest, Transcode.Message, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "", + pattern: {:get, "/v1/messages"}, + response_body: "", + selector: "" + } + } + }) + + rpc(:GetMessageWithFieldPath, Transcode.NestedMessageRequest, Transcode.Message, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "", + pattern: {:get, "/v1/messages/fieldpath/{message.name}"}, + response_body: "", + selector: "" + } + } + }) + + rpc(:CreateMessage, Transcode.Message, Transcode.Message, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "*", + pattern: {:post, "/v1/messages"}, + response_body: "", + selector: "" + } + } + }) + + rpc(:GetMessageWithResponseBody, Transcode.GetMessageRequest, Transcode.MessageOut, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "", + pattern: {:get, "/v1/messages/response_body/{name}"}, + response_body: "response", + selector: "" + } + } + }) + + rpc(:CreateMessageWithNestedBody, Transcode.NestedMessageRequest, Transcode.Message, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "message", + pattern: {:post, "/v1/messages/nested"}, + response_body: "", + selector: "" + } + } + }) + + rpc(:GetMessageWithSubpathQuery, Transcode.NestedMessageRequest, 
Transcode.Message, %{ + http: %{ + type: Google.Api.PbExtension, + value: %Google.Api.HttpRule{ + __unknown_fields__: [], + additional_bindings: [], + body: "", + pattern: {:get, "/v1/messages/nested"}, + response_body: "", + selector: "" + } + } + }) +end diff --git a/test/support/transcode_messages.proto b/test/support/transcode_messages.proto new file mode 100644 index 00000000..273e3127 --- /dev/null +++ b/test/support/transcode_messages.proto @@ -0,0 +1,81 @@ +syntax = "proto3"; + +import "google/api/annotations.proto"; + +package transcode; + +service Messaging { + rpc GetMessage(GetMessageRequest) returns (Message) { + option (google.api.http) = { + get: "/v1/messages/{name}" + }; + } + + rpc StreamMessages(GetMessageRequest) returns (stream Message) { + option (google.api.http) = { + get: "/v1/messages/stream/{name}" + }; + } + + rpc GetMessageWithSubPath(GetMessageRequest) returns (Message) { + option (google.api.http) = { + get: "/v1/{name=}" + }; + } + + rpc GetMessageWithQuery(GetMessageRequest) returns (Message) { + option (google.api.http) = { + get: "/v1/messages" + }; + } + + rpc GetMessageWithFieldPath(NestedMessageRequest) returns (Message) { + option (google.api.http) = { + get: "/v1/messages/fieldpath/{message.name}" + }; + } + + rpc CreateMessage(Message) returns (Message) { + option (google.api.http) = { + post: "/v1/messages" + body: "*" + }; + } + + rpc GetMessageWithResponseBody(GetMessageRequest) returns (MessageOut) { + option (google.api.http) = { + get: "/v1/messages/response_body/{name}", + response_body: "response" + }; + } + + rpc CreateMessageWithNestedBody(NestedMessageRequest) returns (Message) { + option (google.api.http) = { + post: "/v1/messages/nested", + body: "message" + }; + } + + rpc GetMessageWithSubpathQuery(NestedMessageRequest) returns (Message) { + option (google.api.http) = { + get: "/v1/messages/nested" + }; + } +} + +message MessageOut { + Message response = 1; +} + +message GetMessageRequest { + string name = 1; // Mapped to URL path. +} + +message Message { + string name = 1; + string text = 2; +} + +message NestedMessageRequest { + GetMessageRequest message = 1; +}
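
A note for readers of the `transcode_messages.proto` service above: each `option (google.api.http)` entry compiles into the `pattern: {verb, template}` tuples seen in the generated `transcode_messages.pb.ex`, and `{name}`-style template segments bind URL path segments to fields of the request message. The sketch below illustrates only that binding step; `TranscodeSketch` and its functions are illustrative names, not part of this change.

```elixir
defmodule TranscodeSketch do
  # Toy matcher for HttpRule path templates such as "/v1/messages/{name}".
  # Returns {:ok, bindings} when the concrete path fits the template,
  # or :nomatch otherwise.
  def match(template, path) do
    do_match(String.split(template, "/"), String.split(path, "/"), %{})
  end

  # Both template and path fully consumed: success.
  defp do_match([], [], bindings), do: {:ok, bindings}

  # A "{field}" segment captures the corresponding path segment.
  defp do_match(["{" <> rest | t_tail], [segment | p_tail], bindings) do
    field = String.trim_trailing(rest, "}")
    do_match(t_tail, p_tail, Map.put(bindings, field, segment))
  end

  # Literal segments must match exactly (the repeated `segment` variable
  # in the pattern enforces equality).
  defp do_match([segment | t_tail], [segment | p_tail], bindings),
    do: do_match(t_tail, p_tail, bindings)

  defp do_match(_, _, _), do: :nomatch
end

# TranscodeSketch.match("/v1/messages/{name}", "/v1/messages/hello")
# #=> {:ok, %{"name" => "hello"}}
# The bindings would then populate a %Transcode.GetMessageRequest{}.
```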
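The `body` and `response_body` options play the complementary role: `body` selects which part of the request message the HTTP payload decodes into (`"*"` means the whole message, `"message"` means the `message` subfield, empty means no body is mapped), and `response_body` selects which subfield of the reply is serialized. A hedged sketch of that selection over plain maps, again with illustrative names only:

```elixir
defmodule BodyBindingSketch do
  # body: ""      -> no HTTP payload is mapped (GET-style methods).
  # body: "*"     -> the decoded payload becomes the whole request.
  # body: "field" -> the decoded payload populates only that subfield.
  def bind_request(request, _payload, ""), do: request
  def bind_request(_request, payload, "*"), do: payload
  def bind_request(request, payload, field), do: Map.put(request, field, payload)

  # response_body: ""      -> serialize the whole reply.
  # response_body: "field" -> serialize only that subfield of the reply.
  def select_response(reply, ""), do: reply
  def select_response(reply, field), do: Map.fetch!(reply, field)
end
```

Under this reading, `CreateMessageWithNestedBody` (`body: "message"`) decodes the JSON payload into the `message` field of `NestedMessageRequest`, and `GetMessageWithResponseBody` (`response_body: "response"`) returns only the `response` field of `MessageOut`.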