From 1f78401f39553ebeb571f436cfb1888cf32b5ee1 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Wed, 2 Jul 2025 20:52:31 +0530 Subject: [PATCH 01/26] protobuf codec, codegen, interop test --- .github/workflows/CI.yml | 19 ++ Cargo.toml | 1 + compiler/.gitignore | 9 + compiler/MODULE.bazel | 15 + compiler/README.md | 29 ++ compiler/src/BUILD | 13 + compiler/src/grpc_rust_generator.cc | 472 ++++++++++++++++++++++++++++ compiler/src/grpc_rust_generator.h | 56 ++++ compiler/src/grpc_rust_plugin.cc | 138 ++++++++ grpc-build/Cargo.toml | 9 + grpc-build/src/lib.rs | 195 ++++++++++++ grpc/Cargo.toml | 3 +- grpc/src/codec.rs | 103 ++++++ grpc/src/lib.rs | 2 + grpc/src/macros.rs | 57 ++++ interop/Cargo.toml | 7 + interop/build.rs | 5 + interop/src/bin/client_grpc.rs | 136 ++++++++ interop/src/client_grpc.rs | 412 ++++++++++++++++++++++++ interop/src/lib.rs | 31 ++ interop/test.sh | 3 + 21 files changed, 1714 insertions(+), 1 deletion(-) create mode 100644 compiler/.gitignore create mode 100644 compiler/MODULE.bazel create mode 100644 compiler/README.md create mode 100644 compiler/src/BUILD create mode 100644 compiler/src/grpc_rust_generator.cc create mode 100644 compiler/src/grpc_rust_generator.h create mode 100644 compiler/src/grpc_rust_plugin.cc create mode 100644 grpc-build/Cargo.toml create mode 100644 grpc-build/src/lib.rs create mode 100644 grpc/src/codec.rs create mode 100644 grpc/src/macros.rs create mode 100644 interop/src/bin/client_grpc.rs create mode 100644 interop/src/client_grpc.rs diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 49dfd0da8..27411add8 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -141,6 +141,25 @@ jobs: - uses: actions/checkout@v4 - uses: hecrj/setup-rust-action@v2 - uses: taiki-e/install-action@protoc + - name: Install Bazel + uses: bazel-contrib/setup-bazel@0.15.0 + with: + # Avoid downloading Bazel every time. + bazelisk-cache: true + # Store build cache per workflow. + disk-cache: ${{ github.workflow }} + # Share repository cache between workflows. + repository-cache: true + module-root: ./compiler + - name: Build and protoc plugin and dd to PATH + id: build_step + # This runs all commands within the compiler/ directory + working-directory: ./compiler + run: | + bazel build //src:protoc-gen-rust-grpc + + # Add the output directory to the GitHub PATH for subsequent steps + echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH - uses: Swatinem/rust-cache@v2 - name: Run interop tests run: ./interop/test.sh diff --git a/Cargo.toml b/Cargo.toml index ce9bc4d43..604a2e118 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,6 +9,7 @@ members = [ "examples", "codegen", "grpc", + "grpc-build", "interop", # Tests "tests/disable_comments", "tests/included_service", diff --git a/compiler/.gitignore b/compiler/.gitignore new file mode 100644 index 000000000..1707cb793 --- /dev/null +++ b/compiler/.gitignore @@ -0,0 +1,9 @@ +# Bazel +bazel-bin +bazel-examples +bazel-genfiles +bazel-grpc-java +bazel-out +bazel-testlogs +MODULE.bazel.lock + diff --git a/compiler/MODULE.bazel b/compiler/MODULE.bazel new file mode 100644 index 000000000..da3a3b966 --- /dev/null +++ b/compiler/MODULE.bazel @@ -0,0 +1,15 @@ +bazel_dep(name = "protobuf", repo_name = "com_google_protobuf", version = "31.1") + +# Hedron's Compile Commands Extractor for bazel +# This is used to generate a compile_commands.json file which can be used by +# LSP servers like clangd. 
+# https://github.com/hedronvision/bazel-compile-commands-extractor +bazel_dep(name = "hedron_compile_commands", dev_dependency = True) +git_override( + module_name = "hedron_compile_commands", + # Using a commit from a fork to workaround failures while using absl. + # TODO: replace with a commit on the official repo once the following PR is + # merged: https://github.com/hedronvision/bazel-compile-commands-extractor/pull/219 + remote = "https://github.com/mikael-s-persson/bazel-compile-commands-extractor", + commit = "f5fbd4cee671d8d908f37c83abaf70fba5928fc7" +) diff --git a/compiler/README.md b/compiler/README.md new file mode 100644 index 000000000..2dd472c27 --- /dev/null +++ b/compiler/README.md @@ -0,0 +1,29 @@ +## Usage example +```sh +# Build the plugin with Bazel +bazel build //src:protoc-gen-rust-grpc + +# Set the plugin path +PLUGIN_PATH="$(pwd)/bazel-bin/src/protoc-gen-rust-grpc" + +# Run protoc with the Rust and gRPC plugins +protoc \ + --plugin=protoc-gen-grpc-rust="$PLUGIN_PATH" \ + --rust_opt="experimental-codegen=enabled,kernel=upb" \ + --rust_out=./tmp \ + --rust-grpc_opt="experimental-codegen=enabled" \ + --rust-grpc_out=./tmp \ + routeguide.proto +``` + +## Build +```sh +bazel build //src:protoc-gen-rust-grpc +``` + +## Language Server Support for development +Generate compile_commands.json using bazel plugin. Configure the language +server to use the generate json file. +```sh +bazel run @hedron_compile_commands//:refresh_all +``` diff --git a/compiler/src/BUILD b/compiler/src/BUILD new file mode 100644 index 000000000..97388b7ea --- /dev/null +++ b/compiler/src/BUILD @@ -0,0 +1,13 @@ +cc_binary( + name = "protoc-gen-rust-grpc", + srcs = [ + "grpc_rust_plugin.cc", + "grpc_rust_generator.h", + "grpc_rust_generator.cc", + ], + visibility = ["//visibility:public"], + deps = [ + "@com_google_protobuf//:protoc_lib", + ], +) + diff --git a/compiler/src/grpc_rust_generator.cc b/compiler/src/grpc_rust_generator.cc new file mode 100644 index 000000000..fcc3aeee8 --- /dev/null +++ b/compiler/src/grpc_rust_generator.cc @@ -0,0 +1,472 @@ +/* + * + * Copyright 2025 gRPC authors. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. 
+ * + */ + +#include "src/grpc_rust_generator.h" + +#include "absl/strings/str_replace.h" +#include "absl/strings/str_split.h" +#include "absl/strings/string_view.h" +#include +#include +#include +#include +#include +#include + +#include + +namespace rust_grpc_generator { +namespace protobuf = google::protobuf; +namespace rust = protobuf::compiler::rust; + +using protobuf::Descriptor; +using protobuf::MethodDescriptor; +using protobuf::ServiceDescriptor; +using protobuf::SourceLocation; +using protobuf::compiler::rust::Context; + +template +static std::string +GrpcGetCommentsForDescriptor(const DescriptorType *descriptor) { + SourceLocation location; + if (descriptor->GetSourceLocation(&location)) { + return location.leading_comments.empty() ? location.trailing_comments + : location.leading_comments; + } + return std::string(); +} + +/// Returns the path of a generated message struct relative to the module in the +/// generated service code. +static std::string RsTypePath(Context &ctx, + const absl::string_view &path_within_module, + const GrpcOpts &opts, int depth) { + // If the message type is defined in an external crate using the crate + // mapping, the path must begin ::. If the message type is in the same + // crate, add the relative path to the message module. + if (absl::StartsWith(path_within_module, "::")) { + return std::string(path_within_module); + } + std::string prefix = ""; + for (int i = 0; i < depth; ++i) { + prefix += "super::"; + } + std::string path_to_message_module = opts.message_module_path + "::"; + if (path_to_message_module == "self::") { + path_to_message_module = ""; + } + return prefix + path_to_message_module + std::string(path_within_module); +} + +/** + * Method generation abstraction. + * + * Each service contains a set of generic methods that will be used by codegen + * to generate abstraction implementations for the provided methods. + */ +class Method { +private: + const MethodDescriptor *method_; + +public: + Method() = delete; + + Method(const MethodDescriptor *method) : method_(method) {} + + /// The name of the method in Rust style. + std::string name() const { + return rust::RsSafeName(rust::CamelToSnakeCase(method_->name())); + }; + + /// The fully-qualified name of the method, scope delimited by periods. + absl::string_view full_name() const { return method_->full_name(); } + + /// The name of the method as it appears in the .proto file. + absl::string_view proto_field_name() const { return method_->name(); }; + + /// Checks if the method is streamed by the client. + bool is_client_streaming() const { return method_->client_streaming(); }; + + /// Checks if the method is streamed by the server. + bool is_server_streaming() const { return method_->server_streaming(); }; + + /// Get comments about this method. + std::string comment() const { return GrpcGetCommentsForDescriptor(method_); }; + + /// Checks if the method is deprecated. Default is false. + bool is_deprecated() const { return method_->options().deprecated(); } + + /** + * Type name of request. + * @param proto_path The path to the proto file, for context. + * @return A string representing the qualified name for the generated request + * struct. + */ + std::string request_name(rust::Context &ctx) const { + const Descriptor *input = method_->input_type(); + return rust::RsTypePath(ctx, *input); + }; + + /** + * Type name of response. + * @param proto_path The path to the proto file, for context. + * @return A string representing the qualified name for the generated response + * struct. 
+ */ + std::string response_name(rust::Context &ctx) const { + const Descriptor *output = method_->output_type(); + return rust::RsTypePath(ctx, *output); + }; +}; + +/** + * Service generation abstraction. + * + * This class is an interface that can be implemented and consumed + * by client and server generators to allow any codegen module + * to generate service abstractions. + */ +class Service { +private: + const ServiceDescriptor *service_; + +public: + Service() = delete; + + Service(const ServiceDescriptor *service) : service_(service) {} + + /// The name of the service, not including its containing scope. + std::string name() const { + return rust::RsSafeName(rust::SnakeToUpperCamelCase(service_->name())); + }; + + /// The fully-qualified name of the service, scope delimited by periods. + absl::string_view full_name() const { return service_->full_name(); }; + + /** + * Methods provided by the service. + * @return A span of non-owning pointers to the Method objects. The Service + * implementation is expected to manage the lifetime of these objects. + */ + std::vector methods() const { + std::vector ret; + int methods_count = service_->method_count(); + ret.reserve(methods_count); + for (int i = 0; i < methods_count; ++i) { + ret.push_back(Method(service_->method(i))); + } + return ret; + }; + + /// Get comments about this service. + virtual std::string comment() const { + return GrpcGetCommentsForDescriptor(service_); + }; +}; + +/** + * @brief Formats the full path for a method call. + * @param service The service containing the method. + * @param method The method to format the path for. + * @param emit_package If true, the service name will include its package. + * @return The formatted method path (e.g., "/package.MyService/MyMethod"). + */ +static std::string FormatMethodPath(const Service &service, + const Method &method) { + return absl::StrFormat("/%s/%s", service.full_name(), + method.proto_field_name()); +} + +static std::string SanitizeForRustDoc(absl::string_view raw_comment) { + // 1. Escape the escape character itself first. + std::string sanitized = absl::StrReplaceAll(raw_comment, {{"\\", "\\\\"}}); + + // 2. Escape Markdown and Rustdoc special characters. + sanitized = absl::StrReplaceAll(sanitized, { + {"`", "\\`"}, + {"*", "\\*"}, + {"_", "\\_"}, + {"[", "\\["}, + {"]", "\\]"}, + {"#", "\\#"}, + {"<", "\\<"}, + {">", "\\>"}, + }); + + return sanitized; +} + +static std::string ProtoCommentToRustDoc(absl::string_view proto_comment) { + std::string rust_doc; + std::vector lines = absl::StrSplit(proto_comment, '\n'); + for (const std::string &line : lines) { + // Preserve empty lines. 
+ if (line.empty()) { + rust_doc += ("///\n"); + } else { + rust_doc += absl::StrFormat("/// %s\n", SanitizeForRustDoc(line)); + } + } + return rust_doc; +} + +static void GenerateDeprecated(Context &ctx) { ctx.Emit("#[deprecated]\n"); } + +namespace client { + +static void GenerateMethods(Context &ctx, const Service &service, + const GrpcOpts &opts) { + static std::string unary_format = R"rs( + pub async fn $ident$( + &mut self, + request: impl tonic::IntoRequest<$request$>, + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = $codec_name$::default(); + let path = http::uri::PathAndQuery::from_static("$path$"); + let mut req = request.into_request(); + req.extensions_mut().insert(GrpcMethod::new("$service_name$", "$method_name$")); + self.inner.unary(req, path, codec).await + } + )rs"; + + static std::string server_streaming_format = R"rs( + pub async fn $ident$( + &mut self, + request: impl tonic::IntoRequest<$request$>, + ) -> std::result::Result>, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = $codec_name$::default(); + let path = http::uri::PathAndQuery::from_static("$path$"); + let mut req = request.into_request(); + req.extensions_mut().insert(GrpcMethod::new("$service_name$", "$method_name$")); + self.inner.server_streaming(req, path, codec).await + } + )rs"; + + static std::string client_streaming_format = R"rs( + pub async fn $ident$( + &mut self, + request: impl tonic::IntoStreamingRequest + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = $codec_name$::default(); + let path = http::uri::PathAndQuery::from_static("$path$"); + let mut req = request.into_streaming_request(); + req.extensions_mut().insert(GrpcMethod::new("$service_name$", "$method_name$")); + self.inner.client_streaming(req, path, codec).await + } + )rs"; + + static std::string streaming_format = R"rs( + pub async fn $ident$( + &mut self, + request: impl tonic::IntoStreamingRequest + ) -> std::result::Result>, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = $codec_name$::default(); + let path = http::uri::PathAndQuery::from_static("$path$"); + let mut req = request.into_streaming_request(); + req.extensions_mut().insert(GrpcMethod::new("$service_name$", "$method_name$")); + self.inner.streaming(req, path, codec).await + } + )rs"; + + const std::vector methods = service.methods(); + for (const Method &method : methods) { + ctx.Emit(ProtoCommentToRustDoc(method.comment())); + if (method.is_deprecated()) { + GenerateDeprecated(ctx); + } + const std::string request_type = + RsTypePath(ctx, method.request_name(ctx), opts, 1); + const std::string response_type = + RsTypePath(ctx, method.response_name(ctx), opts, 1); + { + auto vars = + ctx.printer().WithVars({{"codec_name", "grpc::codec::ProtoCodec"}, + {"ident", method.name()}, + {"request", request_type}, + {"response", response_type}, + {"service_name", service.full_name()}, + {"path", FormatMethodPath(service, method)}, + {"method_name", method.proto_field_name()}}); + + if (!method.is_client_streaming() && !method.is_server_streaming()) { + ctx.Emit(unary_format); + } else if (!method.is_client_streaming() && 
+ method.is_server_streaming()) { + ctx.Emit(server_streaming_format); + } else if (method.is_client_streaming() && + !method.is_server_streaming()) { + ctx.Emit(client_streaming_format); + } else { + ctx.Emit(streaming_format); + } + if (&method != &methods.back()) { + ctx.Emit("\n"); + } + } + } +} + +static void generate_client(const Service &service, Context &ctx, + const GrpcOpts &opts) { + std::string service_ident = absl::StrFormat("%sClient", service.name()); + std::string client_mod = + absl::StrFormat("%s_client", rust::CamelToSnakeCase(service.name())); + ctx.Emit( + { + {"client_mod", client_mod}, + {"service_ident", service_ident}, + {"service_doc", + [&] { ctx.Emit(ProtoCommentToRustDoc(service.comment())); }}, + {"methods", [&] { GenerateMethods(ctx, service, opts); }}, + }, + R"rs( + /// Generated client implementations. + pub mod $client_mod$ { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + // will trigger if compression is disabled + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + + $service_doc$ + #[derive(Debug, Clone)] + pub struct $service_ident$ { + inner: tonic::client::Grpc, + } + + impl $service_ident$ + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + + 'static, ::Error: Into + + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + + pub fn with_interceptor(inner: T, interceptor: F) -> + $service_ident$> where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response<>::ResponseBody> + >, + >>::Error: + Into + std::marker::Send + std::marker::Sync, + { + $service_ident$::new(InterceptedService::new(inner, interceptor)) + } + + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) + -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: + CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> + Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> + Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + + $methods$ + } + })rs"); +} + +} // namespace client + +namespace server {} // namespace server + +// Writes the generated service interface into the given +// ZeroCopyOutputStream. 
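// For orientation, the unary template above expands to a client method of
// roughly the following shape. The service (routeguide.RouteGuide) and method
// (GetFeature) names here are hypothetical, and the message paths assume the
// default message_module_path of "self":
//
//   pub async fn get_feature(
//       &mut self,
//       request: impl tonic::IntoRequest<super::Point>,
//   ) -> std::result::Result<tonic::Response<super::Feature>, tonic::Status> {
//       self.inner.ready().await.map_err(|e| {
//           tonic::Status::unknown(format!("Service was not ready: {}", e.into()))
//       })?;
//       let codec = grpc::codec::ProtoCodec::default();
//       let path = http::uri::PathAndQuery::from_static("/routeguide.RouteGuide/GetFeature");
//       let mut req = request.into_request();
//       req.extensions_mut().insert(GrpcMethod::new("routeguide.RouteGuide", "GetFeature"));
//       self.inner.unary(req, path, codec).await
//   }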
+void GenerateService(Context &rust_generator_context, + const ServiceDescriptor *service_desc, + const GrpcOpts &opts) { + const Service service = Service(service_desc); + + client::generate_client(service, rust_generator_context, opts); +} + +std::string GetRsGrpcFile(const protobuf::FileDescriptor &file) { + absl::string_view basename = absl::StripSuffix(file.name(), ".proto"); + return absl::StrCat(basename, "_grpc.pb.rs"); +} + +} // namespace rust_grpc_generator diff --git a/compiler/src/grpc_rust_generator.h b/compiler/src/grpc_rust_generator.h new file mode 100644 index 000000000..b759d3226 --- /dev/null +++ b/compiler/src/grpc_rust_generator.h @@ -0,0 +1,56 @@ +/* + * + * Copyright 2025 gRPC authors. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + */ + +#ifndef NET_GRPC_COMPILER_RUST_GENERATOR_H_ +#define NET_GRPC_COMPILER_RUST_GENERATOR_H_ + +#include // for abort() + +#include +#include +#include + +namespace rust_grpc_generator { + +namespace impl { +namespace protobuf = google::protobuf; +} // namespace impl + +class GrpcOpts { + /// Path the module containing the generated message code. Defaults to + /// "self", i.e. the message code and service code is present in the same + /// module. +public: + std::string message_module_path; +}; + +// Writes the generated service interface into the given ZeroCopyOutputStream +void GenerateService( + impl::protobuf::compiler::rust::Context &rust_generator_context, + const impl::protobuf::ServiceDescriptor *service, const GrpcOpts &opts); + +std::string GetRsGrpcFile(const impl::protobuf::FileDescriptor &file); +} // namespace rust_grpc_generator + +#endif // NET_GRPC_COMPILER_RUST_GENERATOR_H_ diff --git a/compiler/src/grpc_rust_plugin.cc b/compiler/src/grpc_rust_plugin.cc new file mode 100644 index 000000000..78545d02a --- /dev/null +++ b/compiler/src/grpc_rust_plugin.cc @@ -0,0 +1,138 @@ +/* + * + * Copyright 2025 gRPC authors. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + */ + +#include "grpc_rust_generator.h" +#include +#include +#include +#include +#include +#include + +namespace protobuf = google::protobuf; +namespace rust = google::protobuf::compiler::rust; + +static std::string ReconstructParameterList( + const std::vector> &options) { + std::string result; + for (const auto &[key, value] : options) { + if (!result.empty()) { + result += ","; + } + result += key + "=" + value; + } + return result; +} + +class RustGrpcGenerator : public protobuf::compiler::CodeGenerator { +public: + // Protobuf 5.27 released edition 2023. +#if GOOGLE_PROTOBUF_VERSION >= 5027000 + uint64_t GetSupportedFeatures() const override { + return Feature::FEATURE_PROTO3_OPTIONAL | + Feature::FEATURE_SUPPORTS_EDITIONS; + } + protobuf::Edition GetMinimumEdition() const override { + return protobuf::Edition::EDITION_PROTO2; + } + protobuf::Edition GetMaximumEdition() const override { + return protobuf::Edition::EDITION_2023; + } +#else + uint64_t GetSupportedFeatures() const override { + return Feature::FEATURE_PROTO3_OPTIONAL; + } +#endif + + bool Generate(const protobuf::FileDescriptor *file, + const std::string ¶meter, + protobuf::compiler::GeneratorContext *context, + std::string *error) const override { + // Return early to avoid creating an empty output file. + if (file->service_count() == 0) { + return true; + } + std::vector> options; + protobuf::compiler::ParseGeneratorParameter(parameter, &options); + + // Filter out GRPC options. + std::vector> protobuf_options; + rust_grpc_generator::GrpcOpts grpc_opts; + for (auto opt : options) { + if (opt.first == "message_module_path") { + grpc_opts.message_module_path = opt.second; + } else { + protobuf_options.push_back(opt); + } + } + + if (grpc_opts.message_module_path.empty()) { + grpc_opts.message_module_path = "self"; + } + + // The kernel isn't used by gRPC, it is there to pass Rust protobuf's + // validation. + protobuf_options.emplace_back("kernel", "upb"); + + // Copied from protobuf rust's generator.cc. 
+ absl::StatusOr opts = + rust::Options::Parse(ReconstructParameterList(protobuf_options)); + if (!opts.ok()) { + *error = std::string(opts.status().message()); + return false; + } + + std::vector files_in_current_crate; + context->ListParsedFiles(&files_in_current_crate); + + absl::StatusOr> + import_path_to_crate_name = rust::GetImportPathToCrateNameMap(&*opts); + if (!import_path_to_crate_name.ok()) { + *error = std::string(import_path_to_crate_name.status().message()); + return false; + } + + rust::RustGeneratorContext rust_generator_context( + &files_in_current_crate, &*import_path_to_crate_name); + + rust::Context ctx_without_printer(&*opts, &rust_generator_context, nullptr, + std::vector()); + auto outfile = absl::WrapUnique( + context->Open(rust_grpc_generator::GetRsGrpcFile(*file))); + protobuf::io::Printer printer(outfile.get()); + rust::Context ctx = ctx_without_printer.WithPrinter(&printer); + + for (int i = 0; i < file->service_count(); ++i) { + const protobuf::ServiceDescriptor *service = file->service(i); + rust_grpc_generator::GenerateService(ctx, service, grpc_opts); + } + return true; + } +}; + +int main(int argc, char *argv[]) { + RustGrpcGenerator generator; + return protobuf::compiler::PluginMain(argc, argv, &generator); + return 0; +} diff --git a/grpc-build/Cargo.toml b/grpc-build/Cargo.toml new file mode 100644 index 000000000..375f82ad9 --- /dev/null +++ b/grpc-build/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "grpc-build" +version = "0.9.0-alpha.1" +edition = "2021" +authors = ["gRPC Authors"] +license = "MIT" + +[dependencies] +protobuf-codegen = { version = "4.31.1-release" } diff --git a/grpc-build/src/lib.rs b/grpc-build/src/lib.rs new file mode 100644 index 000000000..b3f77e32d --- /dev/null +++ b/grpc-build/src/lib.rs @@ -0,0 +1,195 @@ +use std::io::Write; +use std::{ + fs::File, + path::{Path, PathBuf}, +}; + +#[derive(Debug, Clone)] +pub struct Dependency { + pub crate_name: String, + pub proto_import_paths: Vec, + pub proto_files: Vec, +} + +impl Into for &Dependency { + fn into(self) -> protobuf_codegen::Dependency { + protobuf_codegen::Dependency { + crate_name: self.crate_name.clone(), + proto_import_paths: self.proto_import_paths.clone(), + // TODO: Is this useful to expose the following field? It's not used + // by protobuf codegen. + c_include_paths: Vec::new(), + proto_files: self.proto_files.clone(), + } + } +} + +/// Service generator builder. +#[derive(Debug, Clone)] +pub struct CodeGen { + inputs: Vec, + output_dir: PathBuf, + includes: Vec, + dependencies: Vec, + // Rust import path for the generated message code. The gRPC service code + // will use this to reference generated message structs. Defaults to "self". + message_module_path: Option, + // Whether to generate message code, defaults to true. + generate_message_code: bool, +} + +impl CodeGen { + pub fn new() -> Self { + Self { + inputs: Vec::new(), + output_dir: PathBuf::from(std::env::var("OUT_DIR").unwrap()), + includes: Vec::new(), + dependencies: Vec::new(), + message_module_path: None, + generate_message_code: true, + } + } + + /// Sets whether to generate the message code. This can be disabled if the + /// message code is being generated independently. + pub fn generate_message_code(&mut self, enable: bool) -> &mut Self { + self.generate_message_code = enable; + self + } + + /// Adds a proto file to compile. + pub fn input(&mut self, input: impl AsRef) -> &mut Self { + self.inputs.push(input.as_ref().to_owned()); + self + } + + /// Adds a proto file to compile. 
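// A minimal build.rs sketch using this builder (the include path and proto
// file name are illustrative; compare interop/build.rs in this change):
//
//   fn main() {
//       grpc_build::CodeGen::new()
//           .include("proto")
//           .inputs(["helloworld.proto"])
//           .generate_and_compile()
//           .unwrap();
//   }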
+ pub fn inputs(&mut self, inputs: impl IntoIterator>) -> &mut Self { + self.inputs + .extend(inputs.into_iter().map(|input| input.as_ref().to_owned())); + self + } + + /// Sets the directory for the files generated by protoc. The generated code + /// will be present in a subdirectory corresponding to the path of the + /// proto file withing the included directories. + pub fn output_dir(&mut self, output_dir: impl AsRef) -> &mut Self { + self.output_dir = output_dir.as_ref().to_owned(); + self + } + + /// Add a directory for protoc to scan for .proto files. + pub fn include(&mut self, include: impl AsRef) -> &mut Self { + self.includes.push(include.as_ref().to_owned()); + self + } + + /// Add a directory for protoc to scan for .proto files. + pub fn includes(&mut self, includes: impl Iterator>) -> &mut Self { + self.includes.extend( + includes + .into_iter() + .map(|include| include.as_ref().to_owned()), + ); + self + } + + /// Adds a Rust crate along with a list of proto files whose generated + /// messages it contains. + pub fn dependency(&mut self, deps: Vec) -> &mut Self { + self.dependencies.extend(deps); + self + } + + /// Sets relative path of the module containing the generated message code. + /// This is "self" by default, i.e. the service code expects the message + /// structs to be present in the same module. + pub fn message_module_path(&mut self, message_path: &str) -> &mut Self { + self.message_module_path = Some(message_path.to_string()); + self + } + + pub fn generate_and_compile(&self) -> Result<(), String> { + // Generate the message code. + if self.generate_message_code { + protobuf_codegen::CodeGen::new() + .inputs(self.inputs.clone()) + .output_dir(self.output_dir.clone()) + .includes(self.includes.iter()) + .dependency(self.dependencies.iter().map(|d| d.into()).collect()) + .generate_and_compile() + .unwrap(); + } + let crate_mapping_path = if self.generate_message_code { + self.output_dir.join("crate_mapping.txt") + } else { + self.generate_crate_mapping_file() + }; + + // Generate the service code. 
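// The command assembled below is roughly equivalent to the following protoc
// invocation (illustrative; the actual paths come from the builder config):
//
//   protoc <inputs...> \
//       --rust-grpc_out=<output_dir> \
//       --rust-grpc_opt=experimental-codegen=enabled \
//       --rust-grpc_opt=crate_mapping=<crate_mapping_path> \
//       --rust-grpc_opt=message_module_path=<path> \  # only when configured
//       --proto_path=<include_dir> ...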
+ let mut cmd = std::process::Command::new("protoc"); + for input in &self.inputs { + cmd.arg(input); + } + if !self.output_dir.exists() { + // Attempt to make the directory if it doesn't exist + let _ = std::fs::create_dir(&self.output_dir); + } + + if !self.generate_message_code { + for include in &self.includes { + println!("cargo:rerun-if-changed={}", include.display()); + } + for dep in &self.dependencies { + for path in &dep.proto_import_paths { + println!("cargo:rerun-if-changed={}", path.display()); + } + } + } + + cmd.arg(format!("--rust-grpc_out={}", self.output_dir.display())) + .arg("--rust-grpc_opt=experimental-codegen=enabled"); + cmd.arg(format!( + "--rust-grpc_opt=crate_mapping={}", + crate_mapping_path.display() + )); + if let Some(message_path) = &self.message_module_path { + cmd.arg(format!( + "--rust-grpc_opt=message_module_path={}", + message_path + )); + } + + for include in &self.includes { + cmd.arg(format!("--proto_path={}", include.display())); + } + for dep in &self.dependencies { + for path in &dep.proto_import_paths { + cmd.arg(format!("--proto_path={}", path.display())); + } + } + + let output = cmd + .output() + .map_err(|e| format!("failed to run protoc: {}", e))?; + println!("{}", std::str::from_utf8(&output.stdout).unwrap()); + eprintln!("{}", std::str::from_utf8(&output.stderr).unwrap()); + assert!(output.status.success()); + Ok(()) + } + + fn generate_crate_mapping_file(&self) -> PathBuf { + let crate_mapping_path = self.output_dir.join("crate_mapping.txt"); + let mut file = File::create(crate_mapping_path.clone()).unwrap(); + for dep in &self.dependencies { + file.write_all(format!("{}\n", dep.crate_name).as_bytes()) + .unwrap(); + file.write_all(format!("{}\n", dep.proto_files.len()).as_bytes()) + .unwrap(); + for f in &dep.proto_files { + file.write_all(format!("{}\n", f).as_bytes()).unwrap(); + } + } + crate_mapping_path + } +} diff --git a/grpc/Cargo.toml b/grpc/Cargo.toml index dd1af082b..6f384cd87 100644 --- a/grpc/Cargo.toml +++ b/grpc/Cargo.toml @@ -15,6 +15,7 @@ serde = "1.0.219" hickory-resolver = { version = "0.25.1", optional = true } rand = "0.8.5" parking_lot = "0.12.4" +protobuf = { version = "4.31.1-release" } bytes = "1.10.1" [dev-dependencies] @@ -28,4 +29,4 @@ dns = ["dep:hickory-resolver"] allowed_external_types = [ "tonic::*", "futures_core::stream::Stream", -] \ No newline at end of file +] diff --git a/grpc/src/codec.rs b/grpc/src/codec.rs new file mode 100644 index 000000000..72bb26b19 --- /dev/null +++ b/grpc/src/codec.rs @@ -0,0 +1,103 @@ +use bytes::{Buf, BufMut}; +use protobuf::Message; +use std::marker::PhantomData; +use tonic::{ + codec::{BufferSettings, Codec, DecodeBuf, Decoder, EncodeBuf, Encoder}, + Status, +}; + +/// A [`Codec`] that implements `application/grpc+proto` via the protobuf +/// library. +#[derive(Debug, Clone)] +pub struct ProtoCodec { + _pd: PhantomData<(T, U)>, +} + +impl ProtoCodec { + /// Configure a ProstCodec with encoder/decoder buffer settings. This is used to control + /// how memory is allocated and grows per RPC. 
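// Usage sketch: the generated clients in this change build the codec and pass
// it to tonic's generic client. The request/response types and the method path
// below are illustrative:
//
//   let mut grpc_client = tonic::client::Grpc::new(channel);
//   grpc_client.ready().await?;
//   let codec = ProtoCodec::<HelloRequest, HelloReply>::default();
//   let path = http::uri::PathAndQuery::from_static("/helloworld.Greeter/SayHello");
//   let response = grpc_client.unary(request.into_request(), path, codec).await?;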
+ pub fn new() -> Self { + Self { _pd: PhantomData } + } +} + +impl Default for ProtoCodec { + fn default() -> Self { + Self::new() + } +} + +impl Codec for ProtoCodec +where + T: Message + Send + 'static, + U: Message + Default + Send + 'static, +{ + type Encode = T; + type Decode = U; + + type Encoder = ProtoEncoder; + type Decoder = ProtoDecoder; + + fn encoder(&mut self) -> Self::Encoder { + ProtoEncoder { _pd: PhantomData } + } + + fn decoder(&mut self) -> Self::Decoder { + ProtoDecoder { _pd: PhantomData } + } +} + +/// A [`Encoder`] that knows how to encode `T`. +#[derive(Debug, Clone, Default)] +pub struct ProtoEncoder { + _pd: PhantomData, +} + +impl ProtoEncoder { + /// Get a new encoder with explicit buffer settings + pub fn new() -> Self { + Self { _pd: PhantomData } + } +} + +impl Encoder for ProtoEncoder { + type Item = T; + type Error = Status; + + fn encode(&mut self, item: Self::Item, buf: &mut EncodeBuf<'_>) -> Result<(), Self::Error> { + let serialized = item.serialize().map_err(from_decode_error)?; + buf.put_slice(&serialized.as_slice()); + Ok(()) + } +} + +/// A [`Decoder`] that knows how to decode `U`. +#[derive(Debug, Clone, Default)] +pub struct ProtoDecoder { + _pd: PhantomData, +} + +impl ProtoDecoder { + /// Get a new decoder. + pub fn new() -> Self { + Self { _pd: PhantomData } + } +} + +impl Decoder for ProtoDecoder { + type Item = U; + type Error = Status; + + fn decode(&mut self, buf: &mut DecodeBuf<'_>) -> Result, Self::Error> { + let slice = buf.chunk(); + let item = U::parse(&slice).map_err(from_decode_error)?; + buf.advance(slice.len()); + Ok(Some(item)) + } +} + +fn from_decode_error(error: impl std::error::Error) -> tonic::Status { + // Map Protobuf parse errors to an INTERNAL status code, as per + // https://github.com/grpc/grpc/blob/master/doc/statuscodes.md + Status::internal(error.to_string()) +} diff --git a/grpc/src/lib.rs b/grpc/src/lib.rs index 567925131..38c48d28d 100644 --- a/grpc/src/lib.rs +++ b/grpc/src/lib.rs @@ -33,6 +33,8 @@ #![allow(dead_code)] pub mod client; +pub mod codec; +mod macros; mod rt; pub mod service; diff --git a/grpc/src/macros.rs b/grpc/src/macros.rs new file mode 100644 index 000000000..9c5a24206 --- /dev/null +++ b/grpc/src/macros.rs @@ -0,0 +1,57 @@ +/// Include generated proto server and client items. +/// +/// You must specify the path of the proto file within the proto directory, +/// without the ".proto" extension. +/// +/// ```rust,ignore +/// mod pb { +/// grpc::include_proto!("protos", "helloworld"); +/// } +/// ``` +/// +/// # Note: +/// **This only works if the grpc-build output directory and the message path +/// is unmodified**. +/// The default output directory is set to the [`OUT_DIR`] environment variable +/// and the message path is set to `self`. +/// If the output directory has been modified, the following pattern may be used +/// instead of this macro. +/// +/// If the message path is `self`. +/// ```rust,ignore +/// mod protos { +/// // Include message code. +/// include!("/relative/protobuf/directory/protos/generated.rs"); +/// /// Include service code. +/// include!("/relative/protobuf/directory/proto/helloworld_grpc.pb.rs"); +/// } +///``` +/// +/// If the message code is not in the same module. The following example uses +/// message path as `super::protos`. +/// ```rust,ignore +/// mod protos { +/// // Include message code. +/// include!("/relative/protobuf/directory/protos/generated.rs"); +/// } +/// +/// mod grpc { +/// /// Include service code. 
+/// include!("/relative/protobuf/directory/proto/helloworld_grpc.pb.rs"); +/// } +/// ``` +/// [`OUT_DIR`]: https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts +#[macro_export] +macro_rules! include_proto { + ($parent_dir:literal, $proto_file:literal) => { + include!(concat!(env!("OUT_DIR"), "/", $parent_dir, "/generated.rs")); + include!(concat!( + env!("OUT_DIR"), + "/", + $parent_dir, + "/", + $proto_file, + "_grpc.pb.rs" + )); + }; +} diff --git a/interop/Cargo.toml b/interop/Cargo.toml index 9994895a4..d9cae4294 100644 --- a/interop/Cargo.toml +++ b/interop/Cargo.toml @@ -12,6 +12,10 @@ path = "src/bin/client.rs" name = "server" path = "src/bin/server.rs" +[[bin]] +name = "client_grpc" +path = "src/bin/client_grpc.rs" + [dependencies] async-stream = "0.3" strum = {version = "0.27", features = ["derive"]} @@ -25,6 +29,9 @@ tokio-stream = "0.1" tonic = {path = "../tonic", features = ["tls-ring"]} tower = "0.5" tracing-subscriber = {version = "0.3"} +protobuf = { version = "4.31.1-release" } +grpc = {path = "../grpc"} [build-dependencies] tonic-build = {path = "../tonic-build", features = ["prost"]} +grpc-build = {path = "../grpc-build"} diff --git a/interop/build.rs b/interop/build.rs index 295d5e2a3..b309b073f 100644 --- a/interop/build.rs +++ b/interop/build.rs @@ -2,6 +2,11 @@ fn main() { let proto = "proto/grpc/testing/test.proto"; tonic_build::compile_protos(proto).unwrap(); + grpc_build::CodeGen::new() + .include("proto/grpc/testing") + .inputs(["test.proto", "empty.proto", "messages.proto"]) + .generate_and_compile() + .unwrap(); // prevent needing to rebuild if files (or deps) haven't changed println!("cargo:rerun-if-changed={proto}"); diff --git a/interop/src/bin/client_grpc.rs b/interop/src/bin/client_grpc.rs new file mode 100644 index 000000000..ae0fec251 --- /dev/null +++ b/interop/src/bin/client_grpc.rs @@ -0,0 +1,136 @@ +use interop::client_grpc; +use std::{str::FromStr, time::Duration}; +use tonic::transport::Endpoint; +use tonic::transport::{Certificate, ClientTlsConfig}; + +#[derive(Debug)] +struct Opts { + use_tls: bool, + test_case: Vec, +} + +impl Opts { + fn parse() -> Result { + let mut pargs = pico_args::Arguments::from_env(); + Ok(Self { + use_tls: pargs.contains("--use_tls"), + test_case: pargs.value_from_fn("--test_case", |test_case| { + test_case.split(',').map(Testcase::from_str).collect() + })?, + }) + } +} + +#[tokio::main] +async fn main() -> Result<(), Box> { + interop::trace_init(); + + let matches = Opts::parse()?; + + let test_cases = matches.test_case; + + let scheme = if matches.use_tls { "https" } else { "http" }; + + #[allow(unused_mut)] + let mut endpoint = Endpoint::try_from(format!("{}://localhost:10000", scheme))? 
+ .timeout(Duration::from_secs(5)) + .concurrency_limit(30); + + if matches.use_tls { + let pem = std::fs::read_to_string("interop/data/ca.pem")?; + let ca = Certificate::from_pem(pem); + endpoint = endpoint.tls_config( + ClientTlsConfig::new() + .ca_certificate(ca) + .domain_name("foo.test.google.fr"), + )?; + } + + let channel = endpoint.connect().await?; + + let mut client = client_grpc::TestClient::new(channel.clone()); + let mut unimplemented_client = client_grpc::UnimplementedClient::new(channel); + + let mut failures = Vec::new(); + + for test_case in test_cases { + println!("{:?}:", test_case); + let mut test_results = Vec::new(); + + match test_case { + Testcase::EmptyUnary => client_grpc::empty_unary(&mut client, &mut test_results).await, + Testcase::LargeUnary => client_grpc::large_unary(&mut client, &mut test_results).await, + Testcase::ClientStreaming => { + client_grpc::client_streaming(&mut client, &mut test_results).await + } + Testcase::ServerStreaming => { + client_grpc::server_streaming(&mut client, &mut test_results).await + } + Testcase::PingPong => client_grpc::ping_pong(&mut client, &mut test_results).await, + Testcase::EmptyStream => { + client_grpc::empty_stream(&mut client, &mut test_results).await + } + Testcase::StatusCodeAndMessage => { + client_grpc::status_code_and_message(&mut client, &mut test_results).await + } + Testcase::SpecialStatusMessage => { + client_grpc::special_status_message(&mut client, &mut test_results).await + } + Testcase::UnimplementedMethod => { + client_grpc::unimplemented_method(&mut client, &mut test_results).await + } + Testcase::UnimplementedService => { + client_grpc::unimplemented_service(&mut unimplemented_client, &mut test_results) + .await + } + Testcase::CustomMetadata => { + client_grpc::custom_metadata(&mut client, &mut test_results).await + } + _ => unimplemented!(), + } + + for result in test_results { + println!(" {}", result); + + if result.is_failed() { + failures.push(result); + } + } + } + + if !failures.is_empty() { + println!("{} tests failed", failures.len()); + std::process::exit(1); + } + + Ok(()) +} + +#[derive(Debug, strum::EnumString)] +#[strum(serialize_all = "snake_case")] +enum Testcase { + EmptyUnary, + CacheableUnary, + LargeUnary, + ClientCompressedUnary, + ServerCompressedUnary, + ClientStreaming, + ClientCompressedStreaming, + ServerStreaming, + ServerCompressedStreaming, + PingPong, + EmptyStream, + ComputeEngineCreds, + JwtTokenCreds, + Oauth2AuthToken, + PerRpcCreds, + CustomMetadata, + StatusCodeAndMessage, + SpecialStatusMessage, + UnimplementedMethod, + UnimplementedService, + CancelAfterBegin, + CancelAfterFirstResponse, + TimeoutOnSleepingServer, + ConcurrentLargeUnary, +} diff --git a/interop/src/client_grpc.rs b/interop/src/client_grpc.rs new file mode 100644 index 000000000..a71b99484 --- /dev/null +++ b/interop/src/client_grpc.rs @@ -0,0 +1,412 @@ +use crate::{ + grpc_pb::test_service_client::*, grpc_pb::unimplemented_service_client::*, grpc_pb::*, + test_assert, TestAssertion, +}; +use protobuf::__internal::MatcherEq; +use protobuf::proto; +use tokio::sync::mpsc; +use tokio_stream::StreamExt; +use tonic::transport::Channel; +use tonic::{metadata::MetadataValue, Code, Request, Response, Status}; + +pub type TestClient = TestServiceClient; +pub type UnimplementedClient = UnimplementedServiceClient; + +const LARGE_REQ_SIZE: usize = 271_828; +const LARGE_RSP_SIZE: i32 = 314_159; +const REQUEST_LENGTHS: &[i32] = &[27182, 8, 1828, 45904]; +const RESPONSE_LENGTHS: &[i32] = &[31415, 9, 2653, 
58979]; +const TEST_STATUS_MESSAGE: &str = "test status message"; +const SPECIAL_TEST_STATUS_MESSAGE: &str = + "\t\ntest with whitespace\r\nand Unicode BMP ☺ and non-BMP 😈\t\n"; + +pub async fn empty_unary(client: &mut TestClient, assertions: &mut Vec) { + let result = client.empty_call(Request::new(Empty::default())).await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result { + let body = response.into_inner(); + assertions.push(test_assert!( + "body must not be null", + body.matches(&Empty::default()), + format!("body={:?}", body) + )); + } +} + +pub async fn large_unary(client: &mut TestClient, assertions: &mut Vec) { + use std::mem; + let payload = crate::grpc_utils::client_payload(LARGE_REQ_SIZE); + let req = proto!(SimpleRequest { + response_type: PayloadType::Compressable, + response_size: LARGE_RSP_SIZE, + payload: payload, + }); + + let result = client.unary_call(Request::new(req)).await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result { + let body = response.into_inner(); + let payload_len = body.payload().body().len(); + + assertions.push(test_assert!( + "body must be 314159 bytes", + payload_len == LARGE_RSP_SIZE as usize, + format!("mem::size_of_val(&body)={:?}", mem::size_of_val(&body)) + )); + } +} + +// pub async fn cachable_unary(client: &mut Client, assertions: &mut Vec) { +// let payload = Payload { +// r#type: PayloadType::Compressable as i32, +// body: format!("{:?}", std::time::Instant::now()).into_bytes(), +// }; +// let req = SimpleRequest { +// response_type: PayloadType::Compressable as i32, +// payload: Some(payload), +// ..Default::default() +// }; + +// client. 
+// } + +pub async fn client_streaming(client: &mut TestClient, assertions: &mut Vec) { + let requests = REQUEST_LENGTHS.iter().map(|len| { + proto!(StreamingInputCallRequest { + payload: crate::grpc_utils::client_payload(*len as usize), + }) + }); + + let stream = tokio_stream::iter(requests); + + let result = client.streaming_input_call(Request::new(stream)).await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result { + let body = response.into_inner(); + + assertions.push(test_assert!( + "aggregated payload size must be 74922 bytes", + body.aggregated_payload_size() == 74922, + format!( + "aggregated_payload_size={:?}", + body.aggregated_payload_size() + ) + )); + } +} + +pub async fn server_streaming(client: &mut TestClient, assertions: &mut Vec) { + let req = proto!(StreamingOutputCallRequest { + response_parameters: RESPONSE_LENGTHS + .iter() + .map(|len| ResponseParameters::with_size(*len)), + }); + let req = Request::new(req); + + let result = client.streaming_output_call(req).await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result { + let responses = response + .into_inner() + .filter_map(|m| m.ok()) + .collect::>() + .await; + let actual_response_lengths = crate::grpc_utils::response_lengths(&responses); + let asserts = vec![ + test_assert!( + "there should be four responses", + responses.len() == 4, + format!("responses.len()={:?}", responses.len()) + ), + test_assert!( + "the response payload sizes should match input", + RESPONSE_LENGTHS == actual_response_lengths.as_slice(), + format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths) + ), + ]; + + assertions.extend(asserts); + } +} + +pub async fn ping_pong(client: &mut TestClient, assertions: &mut Vec) { + let (tx, rx) = mpsc::unbounded_channel(); + tx.send(make_ping_pong_request(0)).unwrap(); + + let result = client + .full_duplex_call(Request::new( + tokio_stream::wrappers::UnboundedReceiverStream::new(rx), + )) + .await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(mut response) = result.map(Response::into_inner) { + let mut responses = Vec::new(); + + loop { + match response.next().await { + Some(result) => { + responses.push(result.unwrap()); + if responses.len() == REQUEST_LENGTHS.len() { + drop(tx); + break; + } else { + tx.send(make_ping_pong_request(responses.len())).unwrap(); + } + } + None => { + assertions.push(TestAssertion::Failed { + description: + "server should keep the stream open until the client closes it", + expression: "Stream terminated unexpectedly early", + why: None, + }); + break; + } + } + } + + let actual_response_lengths = crate::grpc_utils::response_lengths(&responses); + assertions.push(test_assert!( + "there should be four responses", + responses.len() == RESPONSE_LENGTHS.len(), + format!("{:?}={:?}", responses.len(), RESPONSE_LENGTHS.len()) + )); + assertions.push(test_assert!( + "the response payload sizes should match input", + RESPONSE_LENGTHS == actual_response_lengths.as_slice(), + format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths) + )); + } +} + +pub async fn empty_stream(client: &mut TestClient, assertions: &mut Vec) { + let stream = tokio_stream::empty(); + let result = client.full_duplex_call(Request::new(stream)).await; + + assertions.push(test_assert!( + "call must be 
successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result.map(Response::into_inner) { + let responses = response.collect::>().await; + + assertions.push(test_assert!( + "there should be no responses", + responses.is_empty(), + format!("responses.len()={:?}", responses.len()) + )); + } +} + +pub async fn status_code_and_message(client: &mut TestClient, assertions: &mut Vec) { + fn validate_response(result: Result, assertions: &mut Vec) + where + T: std::fmt::Debug, + { + assertions.push(test_assert!( + "call must fail with unknown status code", + match &result { + Err(status) => status.code() == Code::Unknown, + _ => false, + }, + format!("result={:?}", result) + )); + + assertions.push(test_assert!( + "call must respsond with expected status message", + match &result { + Err(status) => status.message() == TEST_STATUS_MESSAGE, + _ => false, + }, + format!("result={:?}", result) + )); + } + + let simple_req = proto!(SimpleRequest { + response_status: EchoStatus { + code: 2, + message: TEST_STATUS_MESSAGE.to_string(), + }, + }); + + let duplex_req = proto!(StreamingOutputCallRequest { + response_status: EchoStatus { + code: 2, + message: TEST_STATUS_MESSAGE.to_string(), + }, + }); + + let result = client.unary_call(Request::new(simple_req)).await; + validate_response(result, assertions); + + let stream = tokio_stream::once(duplex_req); + let result = match client.full_duplex_call(Request::new(stream)).await { + Ok(response) => { + let stream = response.into_inner(); + let responses = stream.collect::>().await; + Ok(responses) + } + Err(e) => Err(e), + }; + + validate_response(result, assertions); +} + +pub async fn special_status_message(client: &mut TestClient, assertions: &mut Vec) { + let req = proto!(SimpleRequest { + response_status: EchoStatus { + code: 2, + message: SPECIAL_TEST_STATUS_MESSAGE.to_string(), + }, + }); + + let result = client.unary_call(Request::new(req)).await; + + assertions.push(test_assert!( + "call must fail with unknown status code", + match &result { + Err(status) => status.code() == Code::Unknown, + _ => false, + }, + format!("result={:?}", result) + )); + + assertions.push(test_assert!( + "call must respsond with expected status message", + match &result { + Err(status) => status.message() == SPECIAL_TEST_STATUS_MESSAGE, + _ => false, + }, + format!("result={:?}", result) + )); +} + +pub async fn unimplemented_method(client: &mut TestClient, assertions: &mut Vec) { + let result = client + .unimplemented_call(Request::new(Empty::default())) + .await; + assertions.push(test_assert!( + "call must fail with unimplemented status code", + match &result { + Err(status) => status.code() == Code::Unimplemented, + _ => false, + }, + format!("result={:?}", result) + )); +} + +pub async fn unimplemented_service( + client: &mut UnimplementedClient, + assertions: &mut Vec, +) { + let result = client + .unimplemented_call(Request::new(Empty::default())) + .await; + assertions.push(test_assert!( + "call must fail with unimplemented status code", + match &result { + Err(status) => status.code() == Code::Unimplemented, + _ => false, + }, + format!("result={:?}", result) + )); +} + +pub async fn custom_metadata(client: &mut TestClient, assertions: &mut Vec) { + let key1 = "x-grpc-test-echo-initial"; + let value1: MetadataValue<_> = "test_initial_metadata_value".parse().unwrap(); + let key2 = "x-grpc-test-echo-trailing-bin"; + let value2 = MetadataValue::from_bytes(&[0xab, 0xab, 0xab]); + + let req = proto!(SimpleRequest { + 
response_type: PayloadType::Compressable, + response_size: LARGE_RSP_SIZE, + payload: crate::grpc_utils::client_payload(LARGE_REQ_SIZE), + }); + let mut req_unary = Request::new(req); + req_unary.metadata_mut().insert(key1, value1.clone()); + req_unary.metadata_mut().insert_bin(key2, value2.clone()); + + let stream = tokio_stream::once(make_ping_pong_request(0)); + let mut req_stream = Request::new(stream); + req_stream.metadata_mut().insert(key1, value1.clone()); + req_stream.metadata_mut().insert_bin(key2, value2.clone()); + + let response = client + .unary_call(req_unary) + .await + .expect("call should pass."); + + assertions.push(test_assert!( + "metadata string must match in unary", + response.metadata().get(key1) == Some(&value1), + format!("result={:?}", response.metadata().get(key1)) + )); + assertions.push(test_assert!( + "metadata bin must match in unary", + response.metadata().get_bin(key2) == Some(&value2), + format!("result={:?}", response.metadata().get_bin(key1)) + )); + + let response = client + .full_duplex_call(req_stream) + .await + .expect("call should pass."); + + assertions.push(test_assert!( + "metadata string must match in unary", + response.metadata().get(key1) == Some(&value1), + format!("result={:?}", response.metadata().get(key1)) + )); + + let mut stream = response.into_inner(); + + let trailers = stream.trailers().await.unwrap().unwrap(); + + assertions.push(test_assert!( + "metadata bin must match in unary", + trailers.get_bin(key2) == Some(&value2), + format!("result={:?}", trailers.get_bin(key1)) + )); +} + +fn make_ping_pong_request(idx: usize) -> StreamingOutputCallRequest { + let req_len = REQUEST_LENGTHS[idx]; + let resp_len = RESPONSE_LENGTHS[idx]; + proto!(StreamingOutputCallRequest { + response_parameters: std::iter::once(ResponseParameters::with_size(resp_len)), + payload: crate::grpc_utils::client_payload(req_len as usize), + }) +} diff --git a/interop/src/lib.rs b/interop/src/lib.rs index 961e0fdf7..71a099959 100644 --- a/interop/src/lib.rs +++ b/interop/src/lib.rs @@ -1,6 +1,7 @@ #![recursion_limit = "256"] pub mod client; +pub mod client_grpc; pub mod server; pub mod pb { @@ -9,6 +10,10 @@ pub mod pb { include!(concat!(env!("OUT_DIR"), "/grpc.testing.rs")); } +pub mod grpc_pb { + grpc::include_proto!("", "test"); +} + use std::{default, fmt, iter}; pub fn trace_init() { @@ -49,6 +54,32 @@ fn response_lengths(responses: &[pb::StreamingOutputCallResponse]) -> Vec { responses.iter().map(&response_length).collect() } +mod grpc_utils { + use super::grpc_pb; + use protobuf::proto; + use std::iter; + + pub(crate) fn client_payload(size: usize) -> grpc_pb::Payload { + proto!(grpc_pb::Payload { + body: iter::repeat_n(0u8, size).collect::>(), + }) + } + + impl grpc_pb::ResponseParameters { + pub(crate) fn with_size(size: i32) -> Self { + proto!(grpc_pb::ResponseParameters { size: size }) + } + } + + pub(crate) fn response_length(response: &grpc_pb::StreamingOutputCallResponse) -> i32 { + response.payload().body().len() as i32 + } + + pub(crate) fn response_lengths(responses: &[grpc_pb::StreamingOutputCallResponse]) -> Vec { + responses.iter().map(&response_length).collect() + } +} + #[derive(Debug)] pub enum TestAssertion { Passed { diff --git a/interop/test.sh b/interop/test.sh index c4628d164..676ec4eae 100755 --- a/interop/test.sh +++ b/interop/test.sh @@ -59,6 +59,9 @@ sleep 1 ./target/debug/client --test_case="${JOINED_TEST_CASES}" "${ARG}" +# Test a grpc rust client against a Go server. 
+./target/debug/client_grpc --test_case="${JOINED_TEST_CASES}" ${ARG} + echo ":; killing test server"; kill "${SERVER_PID}"; # run the test server From 6a379ea6fc32751f167ec57cea48b89e55f63806 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Wed, 2 Jul 2025 21:18:48 +0530 Subject: [PATCH 02/26] CI fixes --- .github/workflows/CI.yml | 78 +++++++++++++++++++++++++++++++++++++++- grpc-build/src/lib.rs | 16 ++++++--- grpc/src/codec.rs | 2 +- 3 files changed, 89 insertions(+), 7 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 27411add8..7804c2645 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -33,6 +33,25 @@ jobs: with: components: clippy - uses: taiki-e/install-action@protoc + - name: Install Bazel + uses: bazel-contrib/setup-bazel@0.15.0 + with: + # Avoid downloading Bazel every time. + bazelisk-cache: true + # Store build cache per workflow. + disk-cache: ${{ github.workflow }} + # Share repository cache between workflows. + repository-cache: true + module-root: ./compiler + - name: Build and protoc plugin and add to PATH + id: build_step + # This runs all commands within the compiler/ directory + working-directory: ./compiler + run: | + bazel build //src:protoc-gen-rust-grpc + + # Add the output directory to the GitHub PATH for subsequent steps + echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH - uses: Swatinem/rust-cache@v2 - run: cargo clippy --workspace --all-features --all-targets @@ -55,6 +74,25 @@ jobs: - uses: taiki-e/install-action@cargo-hack - uses: taiki-e/install-action@cargo-udeps - uses: taiki-e/install-action@protoc + - name: Install Bazel + uses: bazel-contrib/setup-bazel@0.15.0 + with: + # Avoid downloading Bazel every time. + bazelisk-cache: true + # Store build cache per workflow. + disk-cache: ${{ github.workflow }} + # Share repository cache between workflows. + repository-cache: true + module-root: ./compiler + - name: Build and protoc plugin and add to PATH + id: build_step + # This runs all commands within the compiler/ directory + working-directory: ./compiler + run: | + bazel build //src:protoc-gen-rust-grpc + + # Add the output directory to the GitHub PATH for subsequent steps + echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH - uses: Swatinem/rust-cache@v2 - run: cargo hack udeps --workspace --exclude-features=_tls-any,tls,tls-aws-lc,tls-ring --each-feature - run: cargo udeps --package tonic --features tls-ring,transport @@ -76,6 +114,25 @@ jobs: - uses: hecrj/setup-rust-action@v2 - uses: taiki-e/install-action@cargo-hack - uses: taiki-e/install-action@protoc + - name: Install Bazel + uses: bazel-contrib/setup-bazel@0.15.0 + with: + # Avoid downloading Bazel every time. + bazelisk-cache: true + # Store build cache per workflow. + disk-cache: ${{ github.workflow }} + # Share repository cache between workflows. 
+ repository-cache: true + module-root: ./compiler + - name: Build and protoc plugin and add to PATH + id: build_step + # This runs all commands within the compiler/ directory + working-directory: ./compiler + run: | + bazel build //src:protoc-gen-rust-grpc + + # Add the output directory to the GitHub PATH for subsequent steps + echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH - uses: Swatinem/rust-cache@v2 - name: Check features run: cargo hack check --workspace --no-private --each-feature --no-dev-deps @@ -115,6 +172,25 @@ jobs: - uses: actions/checkout@v4 - uses: hecrj/setup-rust-action@v2 - uses: taiki-e/install-action@protoc + - name: Install Bazel + uses: bazel-contrib/setup-bazel@0.15.0 + with: + # Avoid downloading Bazel every time. + bazelisk-cache: true + # Store build cache per workflow. + disk-cache: ${{ github.workflow }} + # Share repository cache between workflows. + repository-cache: true + module-root: ./compiler + - name: Build and protoc plugin and add to PATH + id: build_step + # This runs all commands within the compiler/ directory + working-directory: ./compiler + run: | + bazel build //src:protoc-gen-rust-grpc + + # Add the output directory to the GitHub PATH for subsequent steps + echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH - uses: taiki-e/install-action@cargo-hack - uses: taiki-e/install-action@cargo-nextest - uses: Swatinem/rust-cache@v2 @@ -151,7 +227,7 @@ jobs: # Share repository cache between workflows. repository-cache: true module-root: ./compiler - - name: Build and protoc plugin and dd to PATH + - name: Build and protoc plugin and add to PATH id: build_step # This runs all commands within the compiler/ directory working-directory: ./compiler diff --git a/grpc-build/src/lib.rs b/grpc-build/src/lib.rs index b3f77e32d..84b2e226a 100644 --- a/grpc-build/src/lib.rs +++ b/grpc-build/src/lib.rs @@ -11,15 +11,15 @@ pub struct Dependency { pub proto_files: Vec, } -impl Into for &Dependency { - fn into(self) -> protobuf_codegen::Dependency { +impl From<&Dependency> for protobuf_codegen::Dependency { + fn from(val: &Dependency) -> Self { protobuf_codegen::Dependency { - crate_name: self.crate_name.clone(), - proto_import_paths: self.proto_import_paths.clone(), + crate_name: val.crate_name.clone(), + proto_import_paths: val.proto_import_paths.clone(), // TODO: Is this useful to expose the following field? It's not used // by protobuf codegen. 
c_include_paths: Vec::new(), - proto_files: self.proto_files.clone(), + proto_files: val.proto_files.clone(), } } } @@ -193,3 +193,9 @@ impl CodeGen { crate_mapping_path } } + +impl Default for CodeGen { + fn default() -> Self { + Self::new() + } +} diff --git a/grpc/src/codec.rs b/grpc/src/codec.rs index 72bb26b19..88c4346b3 100644 --- a/grpc/src/codec.rs +++ b/grpc/src/codec.rs @@ -2,7 +2,7 @@ use bytes::{Buf, BufMut}; use protobuf::Message; use std::marker::PhantomData; use tonic::{ - codec::{BufferSettings, Codec, DecodeBuf, Decoder, EncodeBuf, Encoder}, + codec::{Codec, DecodeBuf, Decoder, EncodeBuf, Encoder}, Status, }; From f3f9446f95e756292484418d7d0b9c29e00ed35c Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Fri, 4 Jul 2025 13:56:58 +0530 Subject: [PATCH 03/26] Enforce c++17 --- .github/workflows/CI.yml | 10 +++++----- compiler/.bazelrc | 13 +++++++++++++ 2 files changed, 18 insertions(+), 5 deletions(-) create mode 100644 compiler/.bazelrc diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 7804c2645..ab443a6db 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -48,7 +48,7 @@ jobs: # This runs all commands within the compiler/ directory working-directory: ./compiler run: | - bazel build //src:protoc-gen-rust-grpc + bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config # Add the output directory to the GitHub PATH for subsequent steps echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH @@ -89,7 +89,7 @@ jobs: # This runs all commands within the compiler/ directory working-directory: ./compiler run: | - bazel build //src:protoc-gen-rust-grpc + bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config # Add the output directory to the GitHub PATH for subsequent steps echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH @@ -129,7 +129,7 @@ jobs: # This runs all commands within the compiler/ directory working-directory: ./compiler run: | - bazel build //src:protoc-gen-rust-grpc + bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config # Add the output directory to the GitHub PATH for subsequent steps echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH @@ -187,7 +187,7 @@ jobs: # This runs all commands within the compiler/ directory working-directory: ./compiler run: | - bazel build //src:protoc-gen-rust-grpc + bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config # Add the output directory to the GitHub PATH for subsequent steps echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH @@ -232,7 +232,7 @@ jobs: # This runs all commands within the compiler/ directory working-directory: ./compiler run: | - bazel build //src:protoc-gen-rust-grpc + bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config # Add the output directory to the GitHub PATH for subsequent steps echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH diff --git a/compiler/.bazelrc b/compiler/.bazelrc new file mode 100644 index 000000000..441f80f3f --- /dev/null +++ b/compiler/.bazelrc @@ -0,0 +1,13 @@ +# Define a custom config for common Unix-like flags +build:unix --cxxopt=-std=c++17 +build:unix --host_cxxopt=-std=c++17 + +# Inherit the common 'unix' flags for both macOS and Linux +build:macos --config=unix +build:linux --config=unix + +# Windows flags remain as they are +build:windows --cxxopt=/std:c++17 +build:windows --host_cxxopt=/std:c++17 +build:windows --define=protobuf_allow_msvc=true + From 05f4d7d7b88599eb75b93bf025657d2ecb408724 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Fri, 4 Jul 2025 15:34:49 +0530 Subject: [PATCH 04/26] 
Cache plugin --- .github/workflows/CI.yml | 175 +++++++++++++++-------------- compiler/.gitignore | 1 - compiler/MODULE.bazel | 3 +- compiler/src/BUILD | 1 - compiler/src/grpc_rust_generator.h | 2 - 5 files changed, 92 insertions(+), 90 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index ab443a6db..f09568817 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -25,33 +25,70 @@ jobs: components: rustfmt - run: cargo fmt --all --check + build-protoc-plugin: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macOS-latest, windows-latest] + outputs: + cache-hit: ${{ steps.cache-plugin.outputs.cache-hit }} + steps: + - uses: actions/checkout@v4 + - name: Cache protoc plugin + id: cache-plugin + uses: actions/cache@v4 + with: + path: ${{ runner.temp }}/protoc-plugin + # The key changes only when plugin source files change + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + - name: Install Bazel + if: steps.cache-plugin.outputs.cache-hit != 'true' + uses: bazel-contrib/setup-bazel@0.15.0 + with: + # Avoid downloading Bazel every time. + bazelisk-cache: true + # Store build cache per workflow. + disk-cache: ${{ github.workflow }} + # Share repository cache between workflows. + repository-cache: true + module-root: ./compiler + - name: Build protoc plugin + if: steps.cache-plugin.outputs.cache-hit != 'true' + working-directory: ./compiler + shell: bash + run: | + set -e + # On windows, the "//src" gets converted to "/". Disable this path + # conversion. + export MSYS_NO_PATHCONV=1 + export MSYS2_ARG_CONV_EXCL="*" + + bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config + + # The target path needs to match the cache config. + TARGET_PATH="${{ runner.temp }}/protoc-plugin" + mkdir -p "${TARGET_PATH}" + cp bazel-bin/src/protoc-gen-rust-grpc "${TARGET_PATH}" + clippy: runs-on: ubuntu-latest + needs: build-protoc-plugin steps: - uses: actions/checkout@v4 - uses: hecrj/setup-rust-action@v2 with: components: clippy - uses: taiki-e/install-action@protoc - - name: Install Bazel - uses: bazel-contrib/setup-bazel@0.15.0 + - name: Restore protoc plugin from cache + id: cache-plugin + uses: actions/cache@v4 with: - # Avoid downloading Bazel every time. - bazelisk-cache: true - # Store build cache per workflow. - disk-cache: ${{ github.workflow }} - # Share repository cache between workflows. 
- repository-cache: true - module-root: ./compiler - - name: Build and protoc plugin and add to PATH - id: build_step - # This runs all commands within the compiler/ directory - working-directory: ./compiler + path: ${{ runner.temp }}/protoc-plugin + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + - name: Add protoc plugin to PATH + shell: bash run: | - bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config - - # Add the output directory to the GitHub PATH for subsequent steps - echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH + echo "${{ runner.temp }}/protoc-plugin" >> $GITHUB_PATH - uses: Swatinem/rust-cache@v2 - run: cargo clippy --workspace --all-features --all-targets @@ -66,6 +103,7 @@ jobs: udeps: runs-on: ubuntu-latest + needs: build-protoc-plugin steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master @@ -74,25 +112,16 @@ jobs: - uses: taiki-e/install-action@cargo-hack - uses: taiki-e/install-action@cargo-udeps - uses: taiki-e/install-action@protoc - - name: Install Bazel - uses: bazel-contrib/setup-bazel@0.15.0 + - name: Restore protoc plugin from cache + id: cache-plugin + uses: actions/cache@v4 with: - # Avoid downloading Bazel every time. - bazelisk-cache: true - # Store build cache per workflow. - disk-cache: ${{ github.workflow }} - # Share repository cache between workflows. - repository-cache: true - module-root: ./compiler - - name: Build and protoc plugin and add to PATH - id: build_step - # This runs all commands within the compiler/ directory - working-directory: ./compiler + path: ${{ runner.temp }}/protoc-plugin + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + - name: Add protoc plugin to PATH + shell: bash run: | - bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config - - # Add the output directory to the GitHub PATH for subsequent steps - echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH + echo "${{ runner.temp }}/protoc-plugin" >> $GITHUB_PATH - uses: Swatinem/rust-cache@v2 - run: cargo hack udeps --workspace --exclude-features=_tls-any,tls,tls-aws-lc,tls-ring --each-feature - run: cargo udeps --package tonic --features tls-ring,transport @@ -104,6 +133,7 @@ jobs: check: runs-on: ${{ matrix.os }} + needs: build-protoc-plugin strategy: matrix: os: [ubuntu-latest, macOS-latest, windows-latest] @@ -114,25 +144,16 @@ jobs: - uses: hecrj/setup-rust-action@v2 - uses: taiki-e/install-action@cargo-hack - uses: taiki-e/install-action@protoc - - name: Install Bazel - uses: bazel-contrib/setup-bazel@0.15.0 + - name: Restore protoc plugin from cache + id: cache-plugin + uses: actions/cache@v4 with: - # Avoid downloading Bazel every time. - bazelisk-cache: true - # Store build cache per workflow. - disk-cache: ${{ github.workflow }} - # Share repository cache between workflows. 
- repository-cache: true - module-root: ./compiler - - name: Build and protoc plugin and add to PATH - id: build_step - # This runs all commands within the compiler/ directory - working-directory: ./compiler + path: ${{ runner.temp }}/protoc-plugin + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + - name: Add protoc plugin to PATH + shell: bash run: | - bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config - - # Add the output directory to the GitHub PATH for subsequent steps - echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH + echo "${{ runner.temp }}/protoc-plugin" >> $GITHUB_PATH - uses: Swatinem/rust-cache@v2 - name: Check features run: cargo hack check --workspace --no-private --each-feature --no-dev-deps @@ -165,6 +186,7 @@ jobs: test: runs-on: ${{ matrix.os }} + needs: build-protoc-plugin strategy: matrix: os: [ubuntu-latest, macOS-latest, windows-latest] @@ -172,25 +194,16 @@ jobs: - uses: actions/checkout@v4 - uses: hecrj/setup-rust-action@v2 - uses: taiki-e/install-action@protoc - - name: Install Bazel - uses: bazel-contrib/setup-bazel@0.15.0 + - name: Restore protoc plugin from cache + id: cache-plugin + uses: actions/cache@v4 with: - # Avoid downloading Bazel every time. - bazelisk-cache: true - # Store build cache per workflow. - disk-cache: ${{ github.workflow }} - # Share repository cache between workflows. - repository-cache: true - module-root: ./compiler - - name: Build and protoc plugin and add to PATH - id: build_step - # This runs all commands within the compiler/ directory - working-directory: ./compiler + path: ${{ runner.temp }}/protoc-plugin + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + - name: Add protoc plugin to PATH + shell: bash run: | - bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config - - # Add the output directory to the GitHub PATH for subsequent steps - echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH + echo "${{ runner.temp }}/protoc-plugin" >> $GITHUB_PATH - uses: taiki-e/install-action@cargo-hack - uses: taiki-e/install-action@cargo-nextest - uses: Swatinem/rust-cache@v2 @@ -210,6 +223,7 @@ jobs: interop: name: Interop Tests runs-on: ${{ matrix.os }} + needs: build-protoc-plugin strategy: matrix: os: [ubuntu-latest, macOS-latest, windows-latest] @@ -217,25 +231,16 @@ jobs: - uses: actions/checkout@v4 - uses: hecrj/setup-rust-action@v2 - uses: taiki-e/install-action@protoc - - name: Install Bazel - uses: bazel-contrib/setup-bazel@0.15.0 + - name: Restore protoc plugin from cache + id: cache-plugin + uses: actions/cache@v4 with: - # Avoid downloading Bazel every time. - bazelisk-cache: true - # Store build cache per workflow. - disk-cache: ${{ github.workflow }} - # Share repository cache between workflows. 
- repository-cache: true - module-root: ./compiler - - name: Build and protoc plugin and add to PATH - id: build_step - # This runs all commands within the compiler/ directory - working-directory: ./compiler + path: ${{ runner.temp }}/protoc-plugin + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + - name: Add protoc plugin to PATH + shell: bash run: | - bazel build //src:protoc-gen-rust-grpc --enable_platform_specific_config - - # Add the output directory to the GitHub PATH for subsequent steps - echo "$(pwd)/bazel-bin/src" >> $GITHUB_PATH + echo "${{ runner.temp }}/protoc-plugin" >> $GITHUB_PATH - uses: Swatinem/rust-cache@v2 - name: Run interop tests run: ./interop/test.sh diff --git a/compiler/.gitignore b/compiler/.gitignore index 1707cb793..3932fccd9 100644 --- a/compiler/.gitignore +++ b/compiler/.gitignore @@ -6,4 +6,3 @@ bazel-grpc-java bazel-out bazel-testlogs MODULE.bazel.lock - diff --git a/compiler/MODULE.bazel b/compiler/MODULE.bazel index da3a3b966..2e577e648 100644 --- a/compiler/MODULE.bazel +++ b/compiler/MODULE.bazel @@ -9,7 +9,8 @@ git_override( module_name = "hedron_compile_commands", # Using a commit from a fork to workaround failures while using absl. # TODO: replace with a commit on the official repo once the following PR is - # merged: https://github.com/hedronvision/bazel-compile-commands-extractor/pull/219 + # merged: + # https://github.com/hedronvision/bazel-compile-commands-extractor/pull/219 remote = "https://github.com/mikael-s-persson/bazel-compile-commands-extractor", commit = "f5fbd4cee671d8d908f37c83abaf70fba5928fc7" ) diff --git a/compiler/src/BUILD b/compiler/src/BUILD index 97388b7ea..8a66dfa25 100644 --- a/compiler/src/BUILD +++ b/compiler/src/BUILD @@ -10,4 +10,3 @@ cc_binary( "@com_google_protobuf//:protoc_lib", ], ) - diff --git a/compiler/src/grpc_rust_generator.h b/compiler/src/grpc_rust_generator.h index b759d3226..d0f61ffaa 100644 --- a/compiler/src/grpc_rust_generator.h +++ b/compiler/src/grpc_rust_generator.h @@ -25,8 +25,6 @@ #ifndef NET_GRPC_COMPILER_RUST_GENERATOR_H_ #define NET_GRPC_COMPILER_RUST_GENERATOR_H_ -#include // for abort() - #include #include #include From 805cf79415fcca3b57f54cb072ebeba0b66672d4 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Sun, 6 Jul 2025 18:33:27 +0530 Subject: [PATCH 05/26] Feature gate protobuf dependency --- compiler/src/grpc_rust_generator.cc | 16 +++++------ grpc/Cargo.toml | 5 ++-- grpc/src/codec/mod.rs | 26 ++++++++++++++++++ grpc/src/{codec.rs => codec/protobuf.rs} | 34 +++++++++++++++++------- 4 files changed, 62 insertions(+), 19 deletions(-) create mode 100644 grpc/src/codec/mod.rs rename grpc/src/{codec.rs => codec/protobuf.rs} (67%) diff --git a/compiler/src/grpc_rust_generator.cc b/compiler/src/grpc_rust_generator.cc index fcc3aeee8..178177dd3 100644 --- a/compiler/src/grpc_rust_generator.cc +++ b/compiler/src/grpc_rust_generator.cc @@ -312,14 +312,14 @@ static void GenerateMethods(Context &ctx, const Service &service, const std::string response_type = RsTypePath(ctx, method.response_name(ctx), opts, 1); { - auto vars = - ctx.printer().WithVars({{"codec_name", "grpc::codec::ProtoCodec"}, - {"ident", method.name()}, - {"request", request_type}, - {"response", response_type}, - {"service_name", service.full_name()}, - {"path", FormatMethodPath(service, method)}, - {"method_name", method.proto_field_name()}}); + auto vars = ctx.printer().WithVars( + {{"codec_name", "grpc::codec::protobuf::ProtoCodec"}, + {"ident", 
method.name()}, + {"request", request_type}, + {"response", response_type}, + {"service_name", service.full_name()}, + {"path", FormatMethodPath(service, method)}, + {"method_name", method.proto_field_name()}}); if (!method.is_client_streaming() && !method.is_server_streaming()) { ctx.Emit(unary_format); diff --git a/grpc/Cargo.toml b/grpc/Cargo.toml index 6f384cd87..333da05d6 100644 --- a/grpc/Cargo.toml +++ b/grpc/Cargo.toml @@ -15,15 +15,16 @@ serde = "1.0.219" hickory-resolver = { version = "0.25.1", optional = true } rand = "0.8.5" parking_lot = "0.12.4" -protobuf = { version = "4.31.1-release" } +protobuf = { version = "4.31.1-release", optional = true } bytes = "1.10.1" [dev-dependencies] hickory-server = "0.25.2" [features] -default = ["dns"] +default = ["dns", "protobuf"] dns = ["dep:hickory-resolver"] +protobuf = ["dep:protobuf"] [package.metadata.cargo_check_external_types] allowed_external_types = [ diff --git a/grpc/src/codec/mod.rs b/grpc/src/codec/mod.rs new file mode 100644 index 000000000..fecc13af7 --- /dev/null +++ b/grpc/src/codec/mod.rs @@ -0,0 +1,26 @@ +/* + * + * Copyright 2025 gRPC authors. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + */ + +#[cfg(feature = "protobuf")] +pub mod protobuf; diff --git a/grpc/src/codec.rs b/grpc/src/codec/protobuf.rs similarity index 67% rename from grpc/src/codec.rs rename to grpc/src/codec/protobuf.rs index 88c4346b3..747294d7c 100644 --- a/grpc/src/codec.rs +++ b/grpc/src/codec/protobuf.rs @@ -1,3 +1,27 @@ +/* + * + * Copyright 2025 gRPC authors. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + */ + use bytes::{Buf, BufMut}; use protobuf::Message; use std::marker::PhantomData; @@ -13,17 +37,9 @@ pub struct ProtoCodec { _pd: PhantomData<(T, U)>, } -impl ProtoCodec { - /// Configure a ProstCodec with encoder/decoder buffer settings. This is used to control - /// how memory is allocated and grows per RPC. - pub fn new() -> Self { - Self { _pd: PhantomData } - } -} - impl Default for ProtoCodec { fn default() -> Self { - Self::new() + Self { _pd: PhantomData } } } From da8873f387480498edf5146ed61b5cd0f779a71c Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Sun, 6 Jul 2025 18:39:28 +0530 Subject: [PATCH 06/26] Rename compiler directory --- .github/workflows/CI.yml | 16 ++++++++-------- {compiler => protoc-gen-rust-grpc}/.bazelrc | 0 {compiler => protoc-gen-rust-grpc}/.gitignore | 0 {compiler => protoc-gen-rust-grpc}/MODULE.bazel | 0 {compiler => protoc-gen-rust-grpc}/README.md | 0 protoc-gen-rust-grpc/bazel-compiler | 1 + {compiler => protoc-gen-rust-grpc}/src/BUILD | 0 .../src/grpc_rust_generator.cc | 0 .../src/grpc_rust_generator.h | 0 .../src/grpc_rust_plugin.cc | 0 10 files changed, 9 insertions(+), 8 deletions(-) rename {compiler => protoc-gen-rust-grpc}/.bazelrc (100%) rename {compiler => protoc-gen-rust-grpc}/.gitignore (100%) rename {compiler => protoc-gen-rust-grpc}/MODULE.bazel (100%) rename {compiler => protoc-gen-rust-grpc}/README.md (100%) create mode 120000 protoc-gen-rust-grpc/bazel-compiler rename {compiler => protoc-gen-rust-grpc}/src/BUILD (100%) rename {compiler => protoc-gen-rust-grpc}/src/grpc_rust_generator.cc (100%) rename {compiler => protoc-gen-rust-grpc}/src/grpc_rust_generator.h (100%) rename {compiler => protoc-gen-rust-grpc}/src/grpc_rust_plugin.cc (100%) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index f09568817..c3125697e 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -40,7 +40,7 @@ jobs: with: path: ${{ runner.temp }}/protoc-plugin # The key changes only when plugin source files change - key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('protoc-gen-rust-grpc/src/**', 'protoc-gen-rust-grpc/.bazelrc', 'protoc-gen-rust-grpc/MODULE.bazel') }} - name: Install Bazel if: steps.cache-plugin.outputs.cache-hit != 'true' uses: bazel-contrib/setup-bazel@0.15.0 @@ -51,10 +51,10 @@ jobs: disk-cache: ${{ github.workflow }} # Share repository cache between workflows. 
repository-cache: true - module-root: ./compiler + module-root: ./protoc-gen-rust-grpc - name: Build protoc plugin if: steps.cache-plugin.outputs.cache-hit != 'true' - working-directory: ./compiler + working-directory: ./protoc-gen-rust-grpc shell: bash run: | set -e @@ -84,7 +84,7 @@ jobs: uses: actions/cache@v4 with: path: ${{ runner.temp }}/protoc-plugin - key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('protoc-gen-rust-grpc/src/**', 'protoc-gen-rust-grpc/.bazelrc', 'protoc-gen-rust-grpc/MODULE.bazel') }} - name: Add protoc plugin to PATH shell: bash run: | @@ -117,7 +117,7 @@ jobs: uses: actions/cache@v4 with: path: ${{ runner.temp }}/protoc-plugin - key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('protoc-gen-rust-grpc/src/**', 'protoc-gen-rust-grpc/.bazelrc', 'protoc-gen-rust-grpc/MODULE.bazel') }} - name: Add protoc plugin to PATH shell: bash run: | @@ -149,7 +149,7 @@ jobs: uses: actions/cache@v4 with: path: ${{ runner.temp }}/protoc-plugin - key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('protoc-gen-rust-grpc/src/**', 'protoc-gen-rust-grpc/.bazelrc', 'protoc-gen-rust-grpc/MODULE.bazel') }} - name: Add protoc plugin to PATH shell: bash run: | @@ -199,7 +199,7 @@ jobs: uses: actions/cache@v4 with: path: ${{ runner.temp }}/protoc-plugin - key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('protoc-gen-rust-grpc/src/**', 'protoc-gen-rust-grpc/.bazelrc', 'protoc-gen-rust-grpc/MODULE.bazel') }} - name: Add protoc plugin to PATH shell: bash run: | @@ -236,7 +236,7 @@ jobs: uses: actions/cache@v4 with: path: ${{ runner.temp }}/protoc-plugin - key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('compiler/src/**', 'compiler/.bazelrc', 'compiler/MODULE.bazel') }} + key: ${{ runner.os }}-protoc-plugin-${{ hashFiles('protoc-gen-rust-grpc/src/**', 'protoc-gen-rust-grpc/.bazelrc', 'protoc-gen-rust-grpc/MODULE.bazel') }} - name: Add protoc plugin to PATH shell: bash run: | diff --git a/compiler/.bazelrc b/protoc-gen-rust-grpc/.bazelrc similarity index 100% rename from compiler/.bazelrc rename to protoc-gen-rust-grpc/.bazelrc diff --git a/compiler/.gitignore b/protoc-gen-rust-grpc/.gitignore similarity index 100% rename from compiler/.gitignore rename to protoc-gen-rust-grpc/.gitignore diff --git a/compiler/MODULE.bazel b/protoc-gen-rust-grpc/MODULE.bazel similarity index 100% rename from compiler/MODULE.bazel rename to protoc-gen-rust-grpc/MODULE.bazel diff --git a/compiler/README.md b/protoc-gen-rust-grpc/README.md similarity index 100% rename from compiler/README.md rename to protoc-gen-rust-grpc/README.md diff --git a/protoc-gen-rust-grpc/bazel-compiler b/protoc-gen-rust-grpc/bazel-compiler new file mode 120000 index 000000000..e33ec1d76 --- /dev/null +++ b/protoc-gen-rust-grpc/bazel-compiler @@ -0,0 +1 @@ +/usr/local/google/home/arjansbal/.cache/bazel/_bazel_arjansbal/5dfb8b6b7b3bf2499753e6203fa01927/execroot/_main \ No newline at end of file diff --git a/compiler/src/BUILD b/protoc-gen-rust-grpc/src/BUILD similarity index 100% rename from compiler/src/BUILD rename to protoc-gen-rust-grpc/src/BUILD diff --git 
a/compiler/src/grpc_rust_generator.cc b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc similarity index 100% rename from compiler/src/grpc_rust_generator.cc rename to protoc-gen-rust-grpc/src/grpc_rust_generator.cc diff --git a/compiler/src/grpc_rust_generator.h b/protoc-gen-rust-grpc/src/grpc_rust_generator.h similarity index 100% rename from compiler/src/grpc_rust_generator.h rename to protoc-gen-rust-grpc/src/grpc_rust_generator.h diff --git a/compiler/src/grpc_rust_plugin.cc b/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc similarity index 100% rename from compiler/src/grpc_rust_plugin.cc rename to protoc-gen-rust-grpc/src/grpc_rust_plugin.cc From 1a66195f2cc5fb642cd074c537b1d89207ec5e96 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Sun, 6 Jul 2025 18:57:46 +0530 Subject: [PATCH 07/26] Fix external types check --- grpc/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/grpc/Cargo.toml b/grpc/Cargo.toml index 333da05d6..d20aab44c 100644 --- a/grpc/Cargo.toml +++ b/grpc/Cargo.toml @@ -30,4 +30,5 @@ protobuf = ["dep:protobuf"] allowed_external_types = [ "tonic::*", "futures_core::stream::Stream", + "protobuf::codegen_traits::Message", ] From 9c25329f60d8c3af190f44abcd1b19dc7bcaa8a5 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Sun, 6 Jul 2025 19:21:28 +0530 Subject: [PATCH 08/26] Rename interop client biary --- interop/Cargo.toml | 2 +- .../{client_grpc.rs => client_new_codegen.rs} | 39 ++++++++++++------- .../{client_grpc.rs => client_new_codegen.rs} | 0 interop/src/lib.rs | 2 +- interop/test.sh | 2 +- 5 files changed, 27 insertions(+), 18 deletions(-) rename interop/src/bin/{client_grpc.rs => client_new_codegen.rs} (69%) rename interop/src/{client_grpc.rs => client_new_codegen.rs} (100%) diff --git a/interop/Cargo.toml b/interop/Cargo.toml index d9cae4294..ef7a31065 100644 --- a/interop/Cargo.toml +++ b/interop/Cargo.toml @@ -14,7 +14,7 @@ path = "src/bin/server.rs" [[bin]] name = "client_grpc" -path = "src/bin/client_grpc.rs" +path = "src/bin/client_new_codegen.rs" [dependencies] async-stream = "0.3" diff --git a/interop/src/bin/client_grpc.rs b/interop/src/bin/client_new_codegen.rs similarity index 69% rename from interop/src/bin/client_grpc.rs rename to interop/src/bin/client_new_codegen.rs index ae0fec251..7e80ff769 100644 --- a/interop/src/bin/client_grpc.rs +++ b/interop/src/bin/client_new_codegen.rs @@ -1,4 +1,4 @@ -use interop::client_grpc; +use interop::client_new_codegen; use std::{str::FromStr, time::Duration}; use tonic::transport::Endpoint; use tonic::transport::{Certificate, ClientTlsConfig}; @@ -48,8 +48,8 @@ async fn main() -> Result<(), Box> { let channel = endpoint.connect().await?; - let mut client = client_grpc::TestClient::new(channel.clone()); - let mut unimplemented_client = client_grpc::UnimplementedClient::new(channel); + let mut client = client_new_codegen::TestClient::new(channel.clone()); + let mut unimplemented_client = client_new_codegen::UnimplementedClient::new(channel); let mut failures = Vec::new(); @@ -58,33 +58,42 @@ async fn main() -> Result<(), Box> { let mut test_results = Vec::new(); match test_case { - Testcase::EmptyUnary => client_grpc::empty_unary(&mut client, &mut test_results).await, - Testcase::LargeUnary => client_grpc::large_unary(&mut client, &mut test_results).await, + Testcase::EmptyUnary => { + client_new_codegen::empty_unary(&mut client, &mut test_results).await + } + Testcase::LargeUnary => { + client_new_codegen::large_unary(&mut client, &mut test_results).await + } Testcase::ClientStreaming => { - 
client_grpc::client_streaming(&mut client, &mut test_results).await + client_new_codegen::client_streaming(&mut client, &mut test_results).await } Testcase::ServerStreaming => { - client_grpc::server_streaming(&mut client, &mut test_results).await + client_new_codegen::server_streaming(&mut client, &mut test_results).await + } + Testcase::PingPong => { + client_new_codegen::ping_pong(&mut client, &mut test_results).await } - Testcase::PingPong => client_grpc::ping_pong(&mut client, &mut test_results).await, Testcase::EmptyStream => { - client_grpc::empty_stream(&mut client, &mut test_results).await + client_new_codegen::empty_stream(&mut client, &mut test_results).await } Testcase::StatusCodeAndMessage => { - client_grpc::status_code_and_message(&mut client, &mut test_results).await + client_new_codegen::status_code_and_message(&mut client, &mut test_results).await } Testcase::SpecialStatusMessage => { - client_grpc::special_status_message(&mut client, &mut test_results).await + client_new_codegen::special_status_message(&mut client, &mut test_results).await } Testcase::UnimplementedMethod => { - client_grpc::unimplemented_method(&mut client, &mut test_results).await + client_new_codegen::unimplemented_method(&mut client, &mut test_results).await } Testcase::UnimplementedService => { - client_grpc::unimplemented_service(&mut unimplemented_client, &mut test_results) - .await + client_new_codegen::unimplemented_service( + &mut unimplemented_client, + &mut test_results, + ) + .await } Testcase::CustomMetadata => { - client_grpc::custom_metadata(&mut client, &mut test_results).await + client_new_codegen::custom_metadata(&mut client, &mut test_results).await } _ => unimplemented!(), } diff --git a/interop/src/client_grpc.rs b/interop/src/client_new_codegen.rs similarity index 100% rename from interop/src/client_grpc.rs rename to interop/src/client_new_codegen.rs diff --git a/interop/src/lib.rs b/interop/src/lib.rs index 71a099959..d9507a555 100644 --- a/interop/src/lib.rs +++ b/interop/src/lib.rs @@ -1,7 +1,7 @@ #![recursion_limit = "256"] pub mod client; -pub mod client_grpc; +pub mod client_new_codegen; pub mod server; pub mod pb { diff --git a/interop/test.sh b/interop/test.sh index 676ec4eae..974afb2d9 100755 --- a/interop/test.sh +++ b/interop/test.sh @@ -60,7 +60,7 @@ sleep 1 ./target/debug/client --test_case="${JOINED_TEST_CASES}" "${ARG}" # Test a grpc rust client against a Go server. -./target/debug/client_grpc --test_case="${JOINED_TEST_CASES}" ${ARG} +./target/debug/client_new_codegen --test_case="${JOINED_TEST_CASES}" ${ARG} echo ":; killing test server"; kill "${SERVER_PID}"; From 15e1519f9f6161412489aeaa4806a553260c2ffb Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Sun, 6 Jul 2025 19:28:15 +0530 Subject: [PATCH 09/26] Add liscenses --- grpc-build/src/lib.rs | 24 ++++++++++++++++++++++++ grpc/src/macros.rs | 24 ++++++++++++++++++++++++ interop/src/bin/client_new_codegen.rs | 24 ++++++++++++++++++++++++ interop/src/client_new_codegen.rs | 24 ++++++++++++++++++++++++ protoc-gen-rust-grpc/bazel-compiler | 1 - 5 files changed, 96 insertions(+), 1 deletion(-) delete mode 120000 protoc-gen-rust-grpc/bazel-compiler diff --git a/grpc-build/src/lib.rs b/grpc-build/src/lib.rs index 84b2e226a..8e43eab60 100644 --- a/grpc-build/src/lib.rs +++ b/grpc-build/src/lib.rs @@ -1,3 +1,27 @@ +/* + * + * Copyright 2025 gRPC authors. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + */ + use std::io::Write; use std::{ fs::File, diff --git a/grpc/src/macros.rs b/grpc/src/macros.rs index 9c5a24206..d102d5f80 100644 --- a/grpc/src/macros.rs +++ b/grpc/src/macros.rs @@ -1,3 +1,27 @@ +/* + * + * Copyright 2025 gRPC authors. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + */ + /// Include generated proto server and client items. /// /// You must specify the path of the proto file within the proto directory, diff --git a/interop/src/bin/client_new_codegen.rs b/interop/src/bin/client_new_codegen.rs index 7e80ff769..99b8f700b 100644 --- a/interop/src/bin/client_new_codegen.rs +++ b/interop/src/bin/client_new_codegen.rs @@ -1,3 +1,27 @@ +/* + * + * Copyright 2025 gRPC authors. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + */ + use interop::client_new_codegen; use std::{str::FromStr, time::Duration}; use tonic::transport::Endpoint; diff --git a/interop/src/client_new_codegen.rs b/interop/src/client_new_codegen.rs index a71b99484..8afa386ad 100644 --- a/interop/src/client_new_codegen.rs +++ b/interop/src/client_new_codegen.rs @@ -1,3 +1,27 @@ +/* + * + * Copyright 2025 gRPC authors. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + */ + use crate::{ grpc_pb::test_service_client::*, grpc_pb::unimplemented_service_client::*, grpc_pb::*, test_assert, TestAssertion, diff --git a/protoc-gen-rust-grpc/bazel-compiler b/protoc-gen-rust-grpc/bazel-compiler deleted file mode 120000 index e33ec1d76..000000000 --- a/protoc-gen-rust-grpc/bazel-compiler +++ /dev/null @@ -1 +0,0 @@ -/usr/local/google/home/arjansbal/.cache/bazel/_bazel_arjansbal/5dfb8b6b7b3bf2499753e6203fa01927/execroot/_main \ No newline at end of file From 202411c8f0ab327cc85653aba9d5c35bb8b85c78 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Sun, 6 Jul 2025 21:00:11 +0530 Subject: [PATCH 10/26] Docs and comments --- grpc-build/src/lib.rs | 17 +++-- grpc/src/macros.rs | 51 ++++++++------- protoc-gen-rust-grpc/.gitignore | 3 +- protoc-gen-rust-grpc/README.md | 62 +++++++++++++++---- .../src/grpc_rust_generator.cc | 18 ++++-- 5 files changed, 104 insertions(+), 47 deletions(-) diff --git a/grpc-build/src/lib.rs b/grpc-build/src/lib.rs index 8e43eab60..009100d27 100644 --- a/grpc-build/src/lib.rs +++ b/grpc-build/src/lib.rs @@ -28,10 +28,16 @@ use std::{ path::{Path, PathBuf}, }; +/// Details about a crate containing proto files with symbols refferenced in +/// the file being compiled currently. #[derive(Debug, Clone)] pub struct Dependency { + /// Name of the external crate. pub crate_name: String, + /// List of paths .proto files whose codegen is present in the crate. This + /// is used to re-run the build command if required. pub proto_import_paths: Vec, + /// List of .proto file names whose codegen is present in the crate. 
pub proto_files: Vec, } @@ -55,8 +61,6 @@ pub struct CodeGen { output_dir: PathBuf, includes: Vec, dependencies: Vec, - // Rust import path for the generated message code. The gRPC service code - // will use this to reference generated message structs. Defaults to "self". message_module_path: Option, // Whether to generate message code, defaults to true. generate_message_code: bool, @@ -125,9 +129,10 @@ impl CodeGen { self } - /// Sets relative path of the module containing the generated message code. - /// This is "self" by default, i.e. the service code expects the message - /// structs to be present in the same module. + /// Sets path of the module containing the generated message code. This is + /// "self" by default, i.e. the service code expects the message structs to + /// be present in the same module. Set this if the message and service + /// codegen needs to live in separate modules. pub fn message_module_path(&mut self, message_path: &str) -> &mut Self { self.message_module_path = Some(message_path.to_string()); self @@ -211,7 +216,7 @@ impl CodeGen { file.write_all(format!("{}\n", dep.proto_files.len()).as_bytes()) .unwrap(); for f in &dep.proto_files { - file.write_all(format!("{}\n", f).as_bytes()).unwrap(); + file.write_all(format!("{f}\n").as_bytes()).unwrap(); } } crate_mapping_path diff --git a/grpc/src/macros.rs b/grpc/src/macros.rs index d102d5f80..efbbe92dd 100644 --- a/grpc/src/macros.rs +++ b/grpc/src/macros.rs @@ -22,48 +22,57 @@ * */ -/// Include generated proto server and client items. +/// Includes generated proto message, client, and server code. /// -/// You must specify the path of the proto file within the proto directory, -/// without the ".proto" extension. +/// You must specify the path to the `.proto` file +/// **relative to the proto root directory**, without the `.proto` extension. +/// +/// For example, if your proto directory is `path/to/protos` and it contains the +/// file `helloworld.proto`, you would write: /// /// ```rust,ignore /// mod pb { -/// grpc::include_proto!("protos", "helloworld"); +/// grpc::include_proto!("path/to/protos", "helloworld"); /// } /// ``` /// -/// # Note: -/// **This only works if the grpc-build output directory and the message path -/// is unmodified**. -/// The default output directory is set to the [`OUT_DIR`] environment variable -/// and the message path is set to `self`. -/// If the output directory has been modified, the following pattern may be used -/// instead of this macro. +/// # Note +/// **This macro only works if the gRPC build output directory and message path +/// are unmodified.** +/// By default: +/// - The output directory is set to the [`OUT_DIR`] environment variable. +/// - The message path is set to `self`. +/// +/// If you have modified the output directory or message path, you should +/// include the generated code manually instead of using this macro. +/// +/// The following example assumes the message code is imported using `self`: /// -/// If the message path is `self`. /// ```rust,ignore /// mod protos { /// // Include message code. -/// include!("/relative/protobuf/directory/protos/generated.rs"); -/// /// Include service code. -/// include!("/relative/protobuf/directory/proto/helloworld_grpc.pb.rs"); +/// include!("/protobuf/directory/protos/generated.rs"); +/// +/// // Include service code. 
+/// include!("/protobuf/directory/protos/helloworld_grpc.pb.rs"); /// } -///``` +/// ``` +/// +/// If the message code and service code are in different modules, and the +/// message path specified during code generation is `super::protos`, use: /// -/// If the message code is not in the same module. The following example uses -/// message path as `super::protos`. /// ```rust,ignore /// mod protos { /// // Include message code. -/// include!("/relative/protobuf/directory/protos/generated.rs"); +/// include!("/protobuf/directory/protos/generated.rs"); /// } /// /// mod grpc { -/// /// Include service code. -/// include!("/relative/protobuf/directory/proto/helloworld_grpc.pb.rs"); +/// // Include service code. +/// include!("/protobuf/directory/proto/helloworld_grpc.pb.rs"); /// } /// ``` +/// /// [`OUT_DIR`]: https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts #[macro_export] macro_rules! include_proto { diff --git a/protoc-gen-rust-grpc/.gitignore b/protoc-gen-rust-grpc/.gitignore index 3932fccd9..6a0537992 100644 --- a/protoc-gen-rust-grpc/.gitignore +++ b/protoc-gen-rust-grpc/.gitignore @@ -1,8 +1,7 @@ # Bazel bazel-bin -bazel-examples bazel-genfiles -bazel-grpc-java bazel-out +bazel-protoc-gen-rust-grpc bazel-testlogs MODULE.bazel.lock diff --git a/protoc-gen-rust-grpc/README.md b/protoc-gen-rust-grpc/README.md index 2dd472c27..6d62516c3 100644 --- a/protoc-gen-rust-grpc/README.md +++ b/protoc-gen-rust-grpc/README.md @@ -1,29 +1,67 @@ -## Usage example +## Build + +To build the Rust gRPC code generator plugin: + ```sh -# Build the plugin with Bazel +bazel build //src:protoc-gen-rust-grpc +``` + + +## Usage Example + +```sh +# Build the plugin bazel build //src:protoc-gen-rust-grpc # Set the plugin path PLUGIN_PATH="$(pwd)/bazel-bin/src/protoc-gen-rust-grpc" -# Run protoc with the Rust and gRPC plugins +# Run protoc with the Rust and Rust gRPC plugins protoc \ --plugin=protoc-gen-grpc-rust="$PLUGIN_PATH" \ --rust_opt="experimental-codegen=enabled,kernel=upb" \ - --rust_out=./tmp \ + --rust_out=./generated \ --rust-grpc_opt="experimental-codegen=enabled" \ - --rust-grpc_out=./tmp \ + --rust-grpc_out=./generated \ routeguide.proto -``` -## Build -```sh -bazel build //src:protoc-gen-rust-grpc +## Optionally, you can add the plugin to the PATH and omit the --plugin flag. +export PATH="$(pwd)/bazel-bin/src/:$PATH" ``` -## Language Server Support for development -Generate compile_commands.json using bazel plugin. Configure the language -server to use the generate json file. +## Available Options + +These options are specific to the Rust gRPC plugin: + +* `experimental-codegen=enabled`: **Required.** Enables compatibility with the +experimental Rust codegen. +* `message_module_path=PATH` (optional): Specifies the Rust path to the module +where Protobuf messages are defined. Use this when you plan to place the +generated message code in a different module than the service code. + + * Default: `self` + * Example: If your messages are in `crate::pb::routeguide`, use + `message_module_path=crate::pb::routeguide`. +* `crate_mapping=PATH` (optional): Specifies the path to a crate mapping file + generated by Bazel or another build system. You must pass the same mapping + file to `rust_opt` and `rust-grpc_opt`. The file contains: + ``` + \n + \n + \n + ... 
+ \n + ``` + + +## Language Server Support + +To enable IDE features like code navigation and IntelliSense, generate +`compile_commands.json` using [Hedron Compile Commands](https://github.com/hedronvision/bazel-compile-commands-extractor): + ```sh bazel run @hedron_compile_commands//:refresh_all ``` + +Then configure your C++ language server to use the generated +`compile_commands.json`. diff --git a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc index 178177dd3..9b5e2079a 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc +++ b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc @@ -63,19 +63,25 @@ static std::string RsTypePath(Context &ctx, const absl::string_view &path_within_module, const GrpcOpts &opts, int depth) { // If the message type is defined in an external crate using the crate - // mapping, the path must begin ::. If the message type is in the same - // crate, add the relative path to the message module. + // mapping, the path must begin ::. if (absl::StartsWith(path_within_module, "::")) { return std::string(path_within_module); } - std::string prefix = ""; - for (int i = 0; i < depth; ++i) { - prefix += "super::"; - } std::string path_to_message_module = opts.message_module_path + "::"; if (path_to_message_module == "self::") { path_to_message_module = ""; } + + // If the path to the message module is defined from the crate or global + // root, we don't need to add a prefix of "super::"s. + if (absl::StartsWith(path_to_message_module, "crate::") || + absl::StartsWith(path_to_message_module, "::")) { + depth = 0; + } + std::string prefix = ""; + for (int i = 0; i < depth; ++i) { + prefix += "super::"; + } return prefix + path_to_message_module + std::string(path_within_module); } From fe31b76283f2e5f841746bb453a42c212aa1c6ea Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Sun, 6 Jul 2025 21:05:19 +0530 Subject: [PATCH 11/26] Fix test --- interop/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/interop/Cargo.toml b/interop/Cargo.toml index ef7a31065..1ab28c77d 100644 --- a/interop/Cargo.toml +++ b/interop/Cargo.toml @@ -13,7 +13,7 @@ name = "server" path = "src/bin/server.rs" [[bin]] -name = "client_grpc" +name = "client_new_codegen" path = "src/bin/client_new_codegen.rs" [dependencies] From 89d73316bbd0fde4a8ffd2e985ab61edd2d82bd3 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Tue, 8 Jul 2025 20:19:11 +0530 Subject: [PATCH 12/26] Avoid using protobuf rust's context --- grpc-build/src/lib.rs | 3 +- protoc-gen-rust-grpc/README.md | 3 - .../src/grpc_rust_generator.cc | 172 +++++++++++++----- .../src/grpc_rust_generator.h | 48 ++++- protoc-gen-rust-grpc/src/grpc_rust_plugin.cc | 61 ++----- 5 files changed, 187 insertions(+), 100 deletions(-) diff --git a/grpc-build/src/lib.rs b/grpc-build/src/lib.rs index 009100d27..1cf8a1e99 100644 --- a/grpc-build/src/lib.rs +++ b/grpc-build/src/lib.rs @@ -176,8 +176,7 @@ impl CodeGen { } } - cmd.arg(format!("--rust-grpc_out={}", self.output_dir.display())) - .arg("--rust-grpc_opt=experimental-codegen=enabled"); + cmd.arg(format!("--rust-grpc_out={}", self.output_dir.display())); cmd.arg(format!( "--rust-grpc_opt=crate_mapping={}", crate_mapping_path.display() diff --git a/protoc-gen-rust-grpc/README.md b/protoc-gen-rust-grpc/README.md index 6d62516c3..3aafaa309 100644 --- a/protoc-gen-rust-grpc/README.md +++ b/protoc-gen-rust-grpc/README.md @@ -21,7 +21,6 @@ protoc \ --plugin=protoc-gen-grpc-rust="$PLUGIN_PATH" \ 
--rust_opt="experimental-codegen=enabled,kernel=upb" \ --rust_out=./generated \ - --rust-grpc_opt="experimental-codegen=enabled" \ --rust-grpc_out=./generated \ routeguide.proto @@ -33,8 +32,6 @@ export PATH="$(pwd)/bazel-bin/src/:$PATH" These options are specific to the Rust gRPC plugin: -* `experimental-codegen=enabled`: **Required.** Enables compatibility with the -experimental Rust codegen. * `message_module_path=PATH` (optional): Specifies the Rust path to the module where Protobuf messages are defined. Use this when you plan to place the generated message code in a different module than the service code. diff --git a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc index 9b5e2079a..c20b59254 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc +++ b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc @@ -24,6 +24,7 @@ #include "src/grpc_rust_generator.h" +#include "absl/strings/str_join.h" #include "absl/strings/str_replace.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" @@ -40,11 +41,12 @@ namespace rust_grpc_generator { namespace protobuf = google::protobuf; namespace rust = protobuf::compiler::rust; +using google::protobuf::FileDescriptor; using protobuf::Descriptor; using protobuf::MethodDescriptor; using protobuf::ServiceDescriptor; using protobuf::SourceLocation; -using protobuf::compiler::rust::Context; +using protobuf::io::Printer; template static std::string @@ -57,15 +59,50 @@ GrpcGetCommentsForDescriptor(const DescriptorType *descriptor) { return std::string(); } -/// Returns the path of a generated message struct relative to the module in the -/// generated service code. -static std::string RsTypePath(Context &ctx, - const absl::string_view &path_within_module, - const GrpcOpts &opts, int depth) { - // If the message type is defined in an external crate using the crate - // mapping, the path must begin ::. - if (absl::StartsWith(path_within_module, "::")) { - return std::string(path_within_module); +static std::string +RustModuleForContainingType(const GrpcOpts &opts, + const Descriptor *containing_type, + const FileDescriptor &file) { + std::vector modules; + // Innermost to outermost order. + const Descriptor *parent = containing_type; + while (parent != nullptr) { + modules.push_back(rust::RsSafeName(rust::CamelToSnakeCase(parent->name()))); + parent = parent->containing_type(); + } + + // Reverse the vector to get submodules in outer-to-inner order). 
+ std::reverse(modules.begin(), modules.end()); + + // If there are any modules at all, push an empty string on the end so that + // we get the trailing :: + if (!modules.empty()) { + modules.push_back(""); + } + + std::string crate_relative = absl::StrJoin(modules, "::"); + + if (opts.is_file_in_current_crate(file)) { + return crate_relative; + } + std::string crate_name = + absl::StrCat("::", rust::RsSafeName(opts.get_crate_name(file.name()))); + + return absl::StrCat(crate_name, "::", crate_relative); +} + +static std::string RsTypePathWithinMessageModule(const GrpcOpts &opts, + const Descriptor &msg) { + return absl::StrCat( + RustModuleForContainingType(opts, msg.containing_type(), *msg.file()), + rust::RsSafeName(msg.name())); +} + +static std::string RsTypePath(const Descriptor &msg, const GrpcOpts &opts, + int depth) { + std::string path_within_module = RsTypePathWithinMessageModule(opts, msg); + if (!opts.is_file_in_current_crate(*msg.file())) { + return path_within_module; } std::string path_to_message_module = opts.message_module_path + "::"; if (path_to_message_module == "self::") { @@ -85,6 +122,61 @@ static std::string RsTypePath(Context &ctx, return prefix + path_to_message_module + std::string(path_within_module); } +struct File { + static absl::Status ReadFileToString(const std::string &name, + std::string *output, bool text_mode) { + char buffer[1024]; + FILE *file = fopen(name.c_str(), text_mode ? "rt" : "rb"); + if (file == nullptr) + return absl::NotFoundError("Could not open file"); + + while (true) { + size_t n = fread(buffer, 1, sizeof(buffer), file); + if (n <= 0) + break; + output->append(buffer, n); + } + + int error = ferror(file); + if (fclose(file) != 0) + return absl::InternalError("Failed to close file"); + if (error != 0) { + return absl::InternalError(absl::StrCat("Failed to read the file ", name, + ". Error code: ", error)); + } + return absl::OkStatus(); + } +}; + +absl::StatusOr> +GetImportPathToCrateNameMap(const std::string &mapping_file_path) { + absl::flat_hash_map mapping; + std::string mapping_contents; + absl::Status status = + File::ReadFileToString(mapping_file_path, &mapping_contents, true); + if (!status.ok()) { + return status; + } + + std::vector lines = + absl::StrSplit(mapping_contents, '\n', absl::SkipEmpty()); + size_t len = lines.size(); + + size_t idx = 0; + while (idx < len) { + absl::string_view crate_name = lines[idx++]; + size_t files_cnt; + if (!absl::SimpleAtoi(lines[idx++], &files_cnt)) { + return absl::InvalidArgumentError( + "Couldn't parse number of import paths in mapping file"); + } + for (size_t i = 0; i < files_cnt; ++i) { + mapping.insert({std::string(lines[idx++]), std::string(crate_name)}); + } + } + return mapping; +} + /** * Method generation abstraction. * @@ -129,9 +221,9 @@ class Method { * @return A string representing the qualified name for the generated request * struct. */ - std::string request_name(rust::Context &ctx) const { + std::string request_name(const GrpcOpts &opts, int depth) const { const Descriptor *input = method_->input_type(); - return rust::RsTypePath(ctx, *input); + return RsTypePath(*input, opts, depth); }; /** @@ -140,9 +232,9 @@ class Method { * @return A string representing the qualified name for the generated response * struct. 
*/ - std::string response_name(rust::Context &ctx) const { + std::string response_name(const GrpcOpts &opts, int depth) const { const Descriptor *output = method_->output_type(); - return rust::RsTypePath(ctx, *output); + return RsTypePath(*output, opts, depth); }; }; @@ -237,11 +329,11 @@ static std::string ProtoCommentToRustDoc(absl::string_view proto_comment) { return rust_doc; } -static void GenerateDeprecated(Context &ctx) { ctx.Emit("#[deprecated]\n"); } +static void GenerateDeprecated(Printer &ctx) { ctx.Emit("#[deprecated]\n"); } namespace client { -static void GenerateMethods(Context &ctx, const Service &service, +static void GenerateMethods(Printer &printer, const Service &service, const GrpcOpts &opts) { static std::string unary_format = R"rs( pub async fn $ident$( @@ -309,54 +401,52 @@ static void GenerateMethods(Context &ctx, const Service &service, const std::vector methods = service.methods(); for (const Method &method : methods) { - ctx.Emit(ProtoCommentToRustDoc(method.comment())); + printer.Emit(ProtoCommentToRustDoc(method.comment())); if (method.is_deprecated()) { - GenerateDeprecated(ctx); + GenerateDeprecated(printer); } - const std::string request_type = - RsTypePath(ctx, method.request_name(ctx), opts, 1); - const std::string response_type = - RsTypePath(ctx, method.response_name(ctx), opts, 1); + const std::string request_type = method.request_name(opts, 1); + const std::string response_type = method.response_name(opts, 1); { - auto vars = ctx.printer().WithVars( - {{"codec_name", "grpc::codec::protobuf::ProtoCodec"}, - {"ident", method.name()}, - {"request", request_type}, - {"response", response_type}, - {"service_name", service.full_name()}, - {"path", FormatMethodPath(service, method)}, - {"method_name", method.proto_field_name()}}); + auto vars = + printer.WithVars({{"codec_name", "grpc::codec::protobuf::ProtoCodec"}, + {"ident", method.name()}, + {"request", request_type}, + {"response", response_type}, + {"service_name", service.full_name()}, + {"path", FormatMethodPath(service, method)}, + {"method_name", method.proto_field_name()}}); if (!method.is_client_streaming() && !method.is_server_streaming()) { - ctx.Emit(unary_format); + printer.Emit(unary_format); } else if (!method.is_client_streaming() && method.is_server_streaming()) { - ctx.Emit(server_streaming_format); + printer.Emit(server_streaming_format); } else if (method.is_client_streaming() && !method.is_server_streaming()) { - ctx.Emit(client_streaming_format); + printer.Emit(client_streaming_format); } else { - ctx.Emit(streaming_format); + printer.Emit(streaming_format); } if (&method != &methods.back()) { - ctx.Emit("\n"); + printer.Emit("\n"); } } } } -static void generate_client(const Service &service, Context &ctx, +static void generate_client(const Service &service, Printer &printer, const GrpcOpts &opts) { std::string service_ident = absl::StrFormat("%sClient", service.name()); std::string client_mod = absl::StrFormat("%s_client", rust::CamelToSnakeCase(service.name())); - ctx.Emit( + printer.Emit( { {"client_mod", client_mod}, {"service_ident", service_ident}, {"service_doc", - [&] { ctx.Emit(ProtoCommentToRustDoc(service.comment())); }}, - {"methods", [&] { GenerateMethods(ctx, service, opts); }}, + [&] { printer.Emit(ProtoCommentToRustDoc(service.comment())); }}, + {"methods", [&] { GenerateMethods(printer, service, opts); }}, }, R"rs( /// Generated client implementations. 
@@ -462,12 +552,12 @@ namespace server {} // namespace server // Writes the generated service interface into the given // ZeroCopyOutputStream. -void GenerateService(Context &rust_generator_context, +void GenerateService(protobuf::io::Printer &printer, const ServiceDescriptor *service_desc, const GrpcOpts &opts) { const Service service = Service(service_desc); - client::generate_client(service, rust_generator_context, opts); + client::generate_client(service, printer, opts); } std::string GetRsGrpcFile(const protobuf::FileDescriptor &file) { diff --git a/protoc-gen-rust-grpc/src/grpc_rust_generator.h b/protoc-gen-rust-grpc/src/grpc_rust_generator.h index d0f61ffaa..44cedcdf4 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_generator.h +++ b/protoc-gen-rust-grpc/src/grpc_rust_generator.h @@ -25,6 +25,7 @@ #ifndef NET_GRPC_COMPILER_RUST_GENERATOR_H_ #define NET_GRPC_COMPILER_RUST_GENERATOR_H_ +#include "absl/log/absl_log.h" #include #include #include @@ -36,19 +37,56 @@ namespace protobuf = google::protobuf; } // namespace impl class GrpcOpts { +public: /// Path the module containing the generated message code. Defaults to /// "self", i.e. the message code and service code is present in the same /// module. -public: - std::string message_module_path; + std::string message_module_path = "self"; + absl::flat_hash_map import_path_to_crate_name = {}; + std::vector files_in_current_crate = + {}; + + absl::string_view get_crate_name(absl::string_view import_path) const { + auto it = import_path_to_crate_name.find(import_path); + if (it == import_path_to_crate_name.end()) { + ABSL_LOG(ERROR) << "Path " << import_path + << " not found in crate mapping. Crate mapping contains " + << import_path_to_crate_name.size() << " entries:"; + for (const auto &entry : import_path_to_crate_name) { + ABSL_LOG(ERROR) << " " << entry.first << " : " << entry.second << "\n"; + } + ABSL_LOG(FATAL) << "Cannot continue with missing crate mapping."; + } + return it->second; + } + + bool is_file_in_current_crate(const impl::protobuf::FileDescriptor &f) const { + return std::find(files_in_current_crate.begin(), + files_in_current_crate.end(), + &f) != files_in_current_crate.end(); + } }; // Writes the generated service interface into the given ZeroCopyOutputStream -void GenerateService( - impl::protobuf::compiler::rust::Context &rust_generator_context, - const impl::protobuf::ServiceDescriptor *service, const GrpcOpts &opts); +void GenerateService(impl::protobuf::io::Printer &printer, + const impl::protobuf::ServiceDescriptor *service, + const GrpcOpts &opts); std::string GetRsGrpcFile(const impl::protobuf::FileDescriptor &file); + +// Returns a map from import path of a .proto file to the name of the crate +// covering that file. +// +// This function parses a .rust_crate_mapping file generated by a build system. 
+// The file contains: +// +// \n +// \n +// > +GetImportPathToCrateNameMap(const std::string &mapping_file_path); } // namespace rust_grpc_generator #endif // NET_GRPC_COMPILER_RUST_GENERATOR_H_ diff --git a/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc b/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc index 78545d02a..e73e0543f 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc +++ b/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc @@ -31,19 +31,6 @@ #include namespace protobuf = google::protobuf; -namespace rust = google::protobuf::compiler::rust; - -static std::string ReconstructParameterList( - const std::vector> &options) { - std::string result; - for (const auto &[key, value] : options) { - if (!result.empty()) { - result += ","; - } - result += key + "=" + value; - } - return result; -} class RustGrpcGenerator : public protobuf::compiler::CodeGenerator { public: @@ -76,56 +63,32 @@ class RustGrpcGenerator : public protobuf::compiler::CodeGenerator { std::vector> options; protobuf::compiler::ParseGeneratorParameter(parameter, &options); - // Filter out GRPC options. - std::vector> protobuf_options; rust_grpc_generator::GrpcOpts grpc_opts; + absl::StatusOr> + import_path_to_crate_name; for (auto opt : options) { if (opt.first == "message_module_path") { grpc_opts.message_module_path = opt.second; - } else { - protobuf_options.push_back(opt); + } else if (opt.first == "crate_mapping") { + absl::StatusOr status = + rust_grpc_generator::GetImportPathToCrateNameMap(opt.second); + if (!status.ok()) { + *error = std::string(status.status().message()); + return false; + } + import_path_to_crate_name = status.value(); } } - if (grpc_opts.message_module_path.empty()) { - grpc_opts.message_module_path = "self"; - } - - // The kernel isn't used by gRPC, it is there to pass Rust protobuf's - // validation. - protobuf_options.emplace_back("kernel", "upb"); - - // Copied from protobuf rust's generator.cc. 
-    absl::StatusOr opts =
-        rust::Options::Parse(ReconstructParameterList(protobuf_options));
-    if (!opts.ok()) {
-      *error = std::string(opts.status().message());
-      return false;
-    }
-
-    std::vector files_in_current_crate;
-    context->ListParsedFiles(&files_in_current_crate);
-
-    absl::StatusOr>
-        import_path_to_crate_name = rust::GetImportPathToCrateNameMap(&*opts);
-    if (!import_path_to_crate_name.ok()) {
-      *error = std::string(import_path_to_crate_name.status().message());
-      return false;
-    }
-
-    rust::RustGeneratorContext rust_generator_context(
-        &files_in_current_crate, &*import_path_to_crate_name);
+    context->ListParsedFiles(&grpc_opts.files_in_current_crate);
 
-    rust::Context ctx_without_printer(&*opts, &rust_generator_context, nullptr,
-                                      std::vector());
     auto outfile = absl::WrapUnique(
         context->Open(rust_grpc_generator::GetRsGrpcFile(*file)));
     protobuf::io::Printer printer(outfile.get());
-    rust::Context ctx = ctx_without_printer.WithPrinter(&printer);
 
     for (int i = 0; i < file->service_count(); ++i) {
       const protobuf::ServiceDescriptor *service = file->service(i);
-      rust_grpc_generator::GenerateService(ctx, service, grpc_opts);
+      rust_grpc_generator::GenerateService(printer, service, grpc_opts);
     }
     return true;
   }

From 9305da79a8afa46c4369d595ca390cb9030bb681 Mon Sep 17 00:00:00 2001
From: Arjan Bal
Date: Wed, 9 Jul 2025 14:26:39 +0530
Subject: [PATCH 13/26] create a separate module for the protobuf codec

---
 Cargo.toml                                   |  3 ++-
 grpc/Cargo.toml                              |  4 +--
 grpc/src/codec/mod.rs                        | 26 -------------------
 grpc/src/lib.rs                              |  1 -
 interop/Cargo.toml                           |  5 ++--
 interop/build.rs                             |  2 +-
 .../src/grpc_rust_generator.cc               |  2 +-
 .../Cargo.toml                               |  2 +-
 .../src/lib.rs                               |  0
 tonic-protobuf/Cargo.toml                    | 17 ++++++++++++
 .../protobuf.rs => tonic-protobuf/src/lib.rs |  0
 11 files changed, 26 insertions(+), 36 deletions(-)
 delete mode 100644 grpc/src/codec/mod.rs
 rename {grpc-build => tonic-protobuf-build}/Cargo.toml (84%)
 rename {grpc-build => tonic-protobuf-build}/src/lib.rs (100%)
 create mode 100644 tonic-protobuf/Cargo.toml
 rename grpc/src/codec/protobuf.rs => tonic-protobuf/src/lib.rs (100%)

diff --git a/Cargo.toml b/Cargo.toml
index 604a2e118..f0d796476 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,13 +3,14 @@ members = [
     "tonic",
     "tonic-build",
     "tonic-health",
+    "tonic-protobuf",
+    "tonic-protobuf-build",
     "tonic-types",
     "tonic-reflection",
     "tonic-web",
     # Non-published crates
     "examples",
     "codegen",
     "grpc",
-    "grpc-build",
     "interop",
     # Tests
     "tests/disable_comments",
     "tests/included_service",
diff --git a/grpc/Cargo.toml b/grpc/Cargo.toml
index d20aab44c..f57666b5b 100644
--- a/grpc/Cargo.toml
+++ b/grpc/Cargo.toml
@@ -15,16 +15,14 @@ serde = "1.0.219"
 hickory-resolver = { version = "0.25.1", optional = true }
 rand = "0.8.5"
 parking_lot = "0.12.4"
-protobuf = { version = "4.31.1-release", optional = true }
 bytes = "1.10.1"
 
 [dev-dependencies]
 hickory-server = "0.25.2"
 
 [features]
-default = ["dns", "protobuf"]
+default = ["dns"]
 dns = ["dep:hickory-resolver"]
-protobuf = ["dep:protobuf"]
 
 [package.metadata.cargo_check_external_types]
 allowed_external_types = [
diff --git a/grpc/src/codec/mod.rs b/grpc/src/codec/mod.rs
deleted file mode 100644
index fecc13af7..000000000
--- a/grpc/src/codec/mod.rs
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- *
- * Copyright 2025 gRPC authors.
- * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - */ - -#[cfg(feature = "protobuf")] -pub mod protobuf; diff --git a/grpc/src/lib.rs b/grpc/src/lib.rs index 38c48d28d..e86fdf811 100644 --- a/grpc/src/lib.rs +++ b/grpc/src/lib.rs @@ -33,7 +33,6 @@ #![allow(dead_code)] pub mod client; -pub mod codec; mod macros; mod rt; pub mod service; diff --git a/interop/Cargo.toml b/interop/Cargo.toml index 1ab28c77d..a91786fa0 100644 --- a/interop/Cargo.toml +++ b/interop/Cargo.toml @@ -29,9 +29,10 @@ tokio-stream = "0.1" tonic = {path = "../tonic", features = ["tls-ring"]} tower = "0.5" tracing-subscriber = {version = "0.3"} -protobuf = { version = "4.31.1-release" } grpc = {path = "../grpc"} +protobuf = { version = "4.31.1-release"} +tonic-protobuf = {path = "../tonic-protobuf"} [build-dependencies] tonic-build = {path = "../tonic-build", features = ["prost"]} -grpc-build = {path = "../grpc-build"} +tonic-protobuf-build = {path = "../tonic-protobuf-build"} diff --git a/interop/build.rs b/interop/build.rs index b309b073f..133896f02 100644 --- a/interop/build.rs +++ b/interop/build.rs @@ -2,7 +2,7 @@ fn main() { let proto = "proto/grpc/testing/test.proto"; tonic_build::compile_protos(proto).unwrap(); - grpc_build::CodeGen::new() + tonic_protobuf_build::CodeGen::new() .include("proto/grpc/testing") .inputs(["test.proto", "empty.proto", "messages.proto"]) .generate_and_compile() diff --git a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc index c20b59254..7bcd9986c 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc +++ b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc @@ -409,7 +409,7 @@ static void GenerateMethods(Printer &printer, const Service &service, const std::string response_type = method.response_name(opts, 1); { auto vars = - printer.WithVars({{"codec_name", "grpc::codec::protobuf::ProtoCodec"}, + printer.WithVars({{"codec_name", "tonic_protobuf::ProtoCodec"}, {"ident", method.name()}, {"request", request_type}, {"response", response_type}, diff --git a/grpc-build/Cargo.toml b/tonic-protobuf-build/Cargo.toml similarity index 84% rename from grpc-build/Cargo.toml rename to tonic-protobuf-build/Cargo.toml index 375f82ad9..2806b6d49 100644 --- a/grpc-build/Cargo.toml +++ b/tonic-protobuf-build/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "grpc-build" +name = "tonic-protobuf-build" version = "0.9.0-alpha.1" edition = "2021" authors = ["gRPC Authors"] diff --git a/grpc-build/src/lib.rs 
b/tonic-protobuf-build/src/lib.rs similarity index 100% rename from grpc-build/src/lib.rs rename to tonic-protobuf-build/src/lib.rs diff --git a/tonic-protobuf/Cargo.toml b/tonic-protobuf/Cargo.toml new file mode 100644 index 000000000..b830fa198 --- /dev/null +++ b/tonic-protobuf/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "tonic-protobuf" +version = "0.9.0-alpha.1" +edition = "2021" +authors = ["gRPC Authors"] +license = "MIT" + +[dependencies] +tonic = { version = "0.14.0", path = "../tonic", default-features = false, features = ["codegen"] } +bytes = "1.10.1" +protobuf = { version = "4.31.1-release" } + +[package.metadata.cargo_check_external_types] +allowed_external_types = [ + "tonic::*", + "protobuf::codegen_traits::Message", +] diff --git a/grpc/src/codec/protobuf.rs b/tonic-protobuf/src/lib.rs similarity index 100% rename from grpc/src/codec/protobuf.rs rename to tonic-protobuf/src/lib.rs From ca8d63826bdc027e374a636e9429d5ae98859a6e Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Wed, 9 Jul 2025 16:31:10 +0530 Subject: [PATCH 14/26] Add readme for codegen --- protoc-gen-rust-grpc/README.md | 10 +++- tonic-protobuf-build/README.md | 94 +++++++++++++++++++++++++++++++++ tonic-protobuf-build/src/lib.rs | 6 +-- 3 files changed, 105 insertions(+), 5 deletions(-) create mode 100644 tonic-protobuf-build/README.md diff --git a/protoc-gen-rust-grpc/README.md b/protoc-gen-rust-grpc/README.md index 3aafaa309..d9533c3c1 100644 --- a/protoc-gen-rust-grpc/README.md +++ b/protoc-gen-rust-grpc/README.md @@ -9,6 +9,12 @@ bazel build //src:protoc-gen-rust-grpc ## Usage Example +**Note:** It's generally recommended to use `tonic_protobuf_build::CodeGen` +and/or `protobuf_codegen::CodeGen` instead of invoking `protoc` directly. Direct +usage of `protoc` and checking in the generated code can lead to stale output if +the `protobuf` dependencies are upgraded later. Using the codegen APIs ensures +consistency with your dependency versions and simplifies regeneration. + ```sh # Build the plugin bazel build //src:protoc-gen-rust-grpc @@ -16,7 +22,7 @@ bazel build //src:protoc-gen-rust-grpc # Set the plugin path PLUGIN_PATH="$(pwd)/bazel-bin/src/protoc-gen-rust-grpc" -# Run protoc with the Rust and Rust gRPC plugins +# Run protoc with the Rust gRPC plugin protoc \ --plugin=protoc-gen-grpc-rust="$PLUGIN_PATH" \ --rust_opt="experimental-codegen=enabled,kernel=upb" \ @@ -24,7 +30,7 @@ protoc \ --rust-grpc_out=./generated \ routeguide.proto -## Optionally, you can add the plugin to the PATH and omit the --plugin flag. +# Optionally, you can add the plugin to the PATH and omit the --plugin flag. export PATH="$(pwd)/bazel-bin/src/:$PATH" ``` diff --git a/tonic-protobuf-build/README.md b/tonic-protobuf-build/README.md new file mode 100644 index 000000000..e47870eaa --- /dev/null +++ b/tonic-protobuf-build/README.md @@ -0,0 +1,94 @@ +# tonic-protobuf-build + +Compiles proto files via protobuf rust and generates service stubs and proto +definitions for use with tonic. + +## Features + +Required dependencies + +```toml +[dependencies] +tonic = "" +protobuf = "" + +[build-dependencies] +tonic-protobuf-build = "" +``` + +You must ensure you have the following programs in your PATH: +1. protoc +1. protoc-gen-rust-grpc + +## Getting Started + +`tonic-protobuf-build` works by being included as a [`build.rs` file](https://doc.rust-lang.org/cargo/reference/build-scripts.html) at the root of the binary/library. 
+
+You can rely on the defaults via
+
+```rust,no_run
+fn main() -> Result<(), Box> {
+    tonic_protobuf_build::CodeGen::new()
+        .include("proto")
+        .inputs(["service.proto"])
+        .generate_and_compile()?;
+    Ok(())
+}
+```
+
+Or configure the generated code deeper via
+
+```rust,no_run
+fn main() -> Result<(), Box> {
+    tonic_protobuf_build::configure()
+        .generate_message_code(false)
+        .inputs(["proto/helloworld/helloworld.proto"])
+        .include("external")
+        .message_module_path("super::proto")
+        .dependencies(vec![tonic_protobuf_build::Dependency {
+            crate_name: "external_protos".to_string(),
+            proto_import_paths: vec![PathBuf::from("external/message.proto")],
+            proto_files: vec!["message.proto".to_string()],
+        }])
+        //.out_dir("src/generated") // you can change the generated code's location
+        .generate_and_compile()?;
+    Ok(())
+}
+```
+
+Then you can reference the generated Rust like this in your code:
+```rust,ignore
+mod protos {
+    // Include message code.
+    include!(concat!(env!("OUT_DIR"), "/proto/helloworld/generated.rs"));
+}
+
+mod grpc {
+    // Include service code.
+    include!(concat!(env!("OUT_DIR"), "/proto/helloworld/helloworld_grpc.pb.rs"));
+}
+```
+
+If you don't modify the `message_module_path`, you can use the `include_proto`
+macro to simplify the import code.
+```rust,ignore
+pub mod grpc_pb {
+    grpc::include_proto!("proto/helloworld", "helloworld");
+}
+```
+
+Or if you want to save the generated code in your own code base,
+you can uncomment the line `.output_dir(...)` above, and in your lib file
+configure a module like this:
+```rust,ignore
+pub mod generated {
+    #[path = ""]
+    pub mod helloworld {
+        #[path = "generated.rs"]
+        pub mod proto;
+
+        #[path = "helloworld_grpc.pb.rs"]
+        pub mod grpc;
+    }
+}
+```
diff --git a/tonic-protobuf-build/src/lib.rs b/tonic-protobuf-build/src/lib.rs
index 1cf8a1e99..10c20c7af 100644
--- a/tonic-protobuf-build/src/lib.rs
+++ b/tonic-protobuf-build/src/lib.rs
@@ -122,9 +122,9 @@ impl CodeGen {
         self
     }
 
-    /// Adds a Rust crate along with a list of proto files whose generated
-    /// messages it contains.
-    pub fn dependency(&mut self, deps: Vec) -> &mut Self {
+    /// Adds a list of Rust crates along with the proto files whose generated
+    /// messages they contain.
+    pub fn dependencies(&mut self, deps: Vec) -> &mut Self {
         self.dependencies.extend(deps);
         self
     }
From 08c6be10aad7135d1e3ed5420a7c851b0967f89e Mon Sep 17 00:00:00 2001
From: Arjan Singh Bal <46515553+arjan-bal@users.noreply.github.com>
Date: Fri, 11 Jul 2025 15:26:12 +0530
Subject: [PATCH 15/26] Apply suggestions from code review

Co-authored-by: Doug Fawley
---
 protoc-gen-rust-grpc/src/grpc_rust_generator.h | 4 ++--
 protoc-gen-rust-grpc/src/grpc_rust_plugin.cc   | 1 -
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/protoc-gen-rust-grpc/src/grpc_rust_generator.h b/protoc-gen-rust-grpc/src/grpc_rust_generator.h
index 44cedcdf4..2f7b591f3 100644
--- a/protoc-gen-rust-grpc/src/grpc_rust_generator.h
+++ b/protoc-gen-rust-grpc/src/grpc_rust_generator.h
@@ -38,8 +38,8 @@ namespace protobuf = google::protobuf;
 
 class GrpcOpts {
 public:
-  /// Path the module containing the generated message code. Defaults to
-  /// "self", i.e. the message code and service code is present in the same
+  /// Path of the module containing the generated message code. Defaults to
+  /// "self", i.e. the message code and service code are present in the same
   /// module.
std::string message_module_path = "self"; absl::flat_hash_map import_path_to_crate_name = {}; diff --git a/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc b/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc index e73e0543f..0d6bbf878 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc +++ b/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc @@ -97,5 +97,4 @@ class RustGrpcGenerator : public protobuf::compiler::CodeGenerator { int main(int argc, char *argv[]) { RustGrpcGenerator generator; return protobuf::compiler::PluginMain(argc, argv, &generator); - return 0; } From 4431e832225c925d420b365c0c4175afc512d396 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Fri, 11 Jul 2025 15:48:51 +0530 Subject: [PATCH 16/26] copyright in build files, clippy fixes and typos in README --- interop/src/client_new_codegen.rs | 14 -------------- protoc-gen-rust-grpc/MODULE.bazel | 20 ++++++++++++++++++++ protoc-gen-rust-grpc/README.md | 2 +- protoc-gen-rust-grpc/src/BUILD | 26 +++++++++++++++++++++++--- tonic-protobuf-build/src/lib.rs | 5 ++--- tonic-protobuf/src/lib.rs | 4 ++-- 6 files changed, 48 insertions(+), 23 deletions(-) diff --git a/interop/src/client_new_codegen.rs b/interop/src/client_new_codegen.rs index 8afa386ad..6823a0c5c 100644 --- a/interop/src/client_new_codegen.rs +++ b/interop/src/client_new_codegen.rs @@ -92,20 +92,6 @@ pub async fn large_unary(client: &mut TestClient, assertions: &mut Vec) { -// let payload = Payload { -// r#type: PayloadType::Compressable as i32, -// body: format!("{:?}", std::time::Instant::now()).into_bytes(), -// }; -// let req = SimpleRequest { -// response_type: PayloadType::Compressable as i32, -// payload: Some(payload), -// ..Default::default() -// }; - -// client. -// } - pub async fn client_streaming(client: &mut TestClient, assertions: &mut Vec) { let requests = REQUEST_LENGTHS.iter().map(|len| { proto!(StreamingInputCallRequest { diff --git a/protoc-gen-rust-grpc/MODULE.bazel b/protoc-gen-rust-grpc/MODULE.bazel index 2e577e648..a4ed1e2b8 100644 --- a/protoc-gen-rust-grpc/MODULE.bazel +++ b/protoc-gen-rust-grpc/MODULE.bazel @@ -1,3 +1,23 @@ +# Copyright 2025 gRPC authors. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+ bazel_dep(name = "protobuf", repo_name = "com_google_protobuf", version = "31.1") # Hedron's Compile Commands Extractor for bazel diff --git a/protoc-gen-rust-grpc/README.md b/protoc-gen-rust-grpc/README.md index d9533c3c1..d7ddf785f 100644 --- a/protoc-gen-rust-grpc/README.md +++ b/protoc-gen-rust-grpc/README.md @@ -24,7 +24,7 @@ PLUGIN_PATH="$(pwd)/bazel-bin/src/protoc-gen-rust-grpc" # Run protoc with the Rust gRPC plugin protoc \ - --plugin=protoc-gen-grpc-rust="$PLUGIN_PATH" \ + --plugin=protoc-gen-rust-grpc="$PLUGIN_PATH" \ --rust_opt="experimental-codegen=enabled,kernel=upb" \ --rust_out=./generated \ --rust-grpc_out=./generated \ diff --git a/protoc-gen-rust-grpc/src/BUILD b/protoc-gen-rust-grpc/src/BUILD index 8a66dfa25..1a3553829 100644 --- a/protoc-gen-rust-grpc/src/BUILD +++ b/protoc-gen-rust-grpc/src/BUILD @@ -1,9 +1,29 @@ +# Copyright 2025 gRPC authors. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+ cc_binary( name = "protoc-gen-rust-grpc", srcs = [ - "grpc_rust_plugin.cc", - "grpc_rust_generator.h", - "grpc_rust_generator.cc", + "grpc_rust_plugin.cc", + "grpc_rust_generator.h", + "grpc_rust_generator.cc", ], visibility = ["//visibility:public"], deps = [ diff --git a/tonic-protobuf-build/src/lib.rs b/tonic-protobuf-build/src/lib.rs index 10c20c7af..7e1bd0aea 100644 --- a/tonic-protobuf-build/src/lib.rs +++ b/tonic-protobuf-build/src/lib.rs @@ -183,8 +183,7 @@ impl CodeGen { )); if let Some(message_path) = &self.message_module_path { cmd.arg(format!( - "--rust-grpc_opt=message_module_path={}", - message_path + "--rust-grpc_opt=message_module_path={message_path}", )); } @@ -199,7 +198,7 @@ impl CodeGen { let output = cmd .output() - .map_err(|e| format!("failed to run protoc: {}", e))?; + .map_err(|e| format!("failed to run protoc: {e}"))?; println!("{}", std::str::from_utf8(&output.stdout).unwrap()); eprintln!("{}", std::str::from_utf8(&output.stderr).unwrap()); assert!(output.status.success()); diff --git a/tonic-protobuf/src/lib.rs b/tonic-protobuf/src/lib.rs index 747294d7c..84b645cb8 100644 --- a/tonic-protobuf/src/lib.rs +++ b/tonic-protobuf/src/lib.rs @@ -82,7 +82,7 @@ impl Encoder for ProtoEncoder { fn encode(&mut self, item: Self::Item, buf: &mut EncodeBuf<'_>) -> Result<(), Self::Error> { let serialized = item.serialize().map_err(from_decode_error)?; - buf.put_slice(&serialized.as_slice()); + buf.put_slice(serialized.as_slice()); Ok(()) } } @@ -106,7 +106,7 @@ impl Decoder for ProtoDecoder { fn decode(&mut self, buf: &mut DecodeBuf<'_>) -> Result, Self::Error> { let slice = buf.chunk(); - let item = U::parse(&slice).map_err(from_decode_error)?; + let item = U::parse(slice).map_err(from_decode_error)?; buf.advance(slice.len()); Ok(Some(item)) } From 8a8e4e6c309aefc41914d4b20dffff0e0e638bc4 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Mon, 14 Jul 2025 13:06:09 +0530 Subject: [PATCH 17/26] mostly C++ readability fixes --- .../src/grpc_rust_generator.cc | 302 ++++++++---------- .../src/grpc_rust_generator.h | 116 ++++--- protoc-gen-rust-grpc/src/grpc_rust_plugin.cc | 74 +++-- 3 files changed, 230 insertions(+), 262 deletions(-) diff --git a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc index 7bcd9986c..d1d05d476 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc +++ b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc @@ -1,68 +1,61 @@ -/* - * - * Copyright 2025 gRPC authors. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - */ +// Copyright 2025 gRPC authors. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +// IN THE SOFTWARE. #include "src/grpc_rust_generator.h" +#include +#include + #include "absl/strings/str_join.h" #include "absl/strings/str_replace.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" -#include -#include -#include -#include -#include -#include - -#include +#include "google/protobuf/compiler/rust/naming.h" +#include "google/protobuf/descriptor.h" +#include "google/protobuf/descriptor.pb.h" namespace rust_grpc_generator { namespace protobuf = google::protobuf; namespace rust = protobuf::compiler::rust; -using google::protobuf::FileDescriptor; using protobuf::Descriptor; +using protobuf::FileDescriptor; using protobuf::MethodDescriptor; using protobuf::ServiceDescriptor; using protobuf::SourceLocation; using protobuf::io::Printer; +namespace { template -static std::string -GrpcGetCommentsForDescriptor(const DescriptorType *descriptor) { +std::string GrpcGetCommentsForDescriptor(const DescriptorType *descriptor) { SourceLocation location; if (descriptor->GetSourceLocation(&location)) { return location.leading_comments.empty() ? location.trailing_comments : location.leading_comments; } - return std::string(); + return ""; } -static std::string -RustModuleForContainingType(const GrpcOpts &opts, - const Descriptor *containing_type, - const FileDescriptor &file) { +std::string RustModuleForContainingType(const GrpcOpts &opts, + const Descriptor *containing_type, + const FileDescriptor &file) { std::vector modules; // Innermost to outermost order. 
const Descriptor *parent = containing_type; @@ -82,29 +75,28 @@ RustModuleForContainingType(const GrpcOpts &opts, std::string crate_relative = absl::StrJoin(modules, "::"); - if (opts.is_file_in_current_crate(file)) { + if (opts.IsFileInCurrentCrate(file)) { return crate_relative; } std::string crate_name = - absl::StrCat("::", rust::RsSafeName(opts.get_crate_name(file.name()))); + absl::StrCat("::", rust::RsSafeName(opts.GetCrateName(file.name()))); return absl::StrCat(crate_name, "::", crate_relative); } -static std::string RsTypePathWithinMessageModule(const GrpcOpts &opts, - const Descriptor &msg) { +std::string RsTypePathWithinMessageModule(const GrpcOpts &opts, + const Descriptor &msg) { return absl::StrCat( RustModuleForContainingType(opts, msg.containing_type(), *msg.file()), rust::RsSafeName(msg.name())); } -static std::string RsTypePath(const Descriptor &msg, const GrpcOpts &opts, - int depth) { +std::string RsTypePath(const Descriptor &msg, const GrpcOpts &opts, int depth) { std::string path_within_module = RsTypePathWithinMessageModule(opts, msg); - if (!opts.is_file_in_current_crate(*msg.file())) { + if (!opts.IsFileInCurrentCrate(*msg.file())) { return path_within_module; } - std::string path_to_message_module = opts.message_module_path + "::"; + std::string path_to_message_module = opts.GetMessageModulePath() + "::"; if (path_to_message_module == "self::") { path_to_message_module = ""; } @@ -122,38 +114,38 @@ static std::string RsTypePath(const Descriptor &msg, const GrpcOpts &opts, return prefix + path_to_message_module + std::string(path_within_module); } -struct File { - static absl::Status ReadFileToString(const std::string &name, - std::string *output, bool text_mode) { - char buffer[1024]; - FILE *file = fopen(name.c_str(), text_mode ? "rt" : "rb"); - if (file == nullptr) - return absl::NotFoundError("Could not open file"); - - while (true) { - size_t n = fread(buffer, 1, sizeof(buffer), file); - if (n <= 0) - break; - output->append(buffer, n); - } +absl::Status ReadFileToString(const absl::string_view name, std::string *output, + bool text_mode) { + char buffer[1024]; + FILE *file = fopen(name.data(), text_mode ? "rt" : "rb"); + if (file == nullptr) + return absl::NotFoundError("Could not open file"); + + while (true) { + size_t n = fread(buffer, 1, sizeof(buffer), file); + if (n <= 0) + break; + output->append(buffer, n); + } - int error = ferror(file); - if (fclose(file) != 0) - return absl::InternalError("Failed to close file"); - if (error != 0) { - return absl::InternalError(absl::StrCat("Failed to read the file ", name, - ". Error code: ", error)); - } - return absl::OkStatus(); + int error = ferror(file); + if (fclose(file) != 0) + return absl::InternalError("Failed to close file"); + if (error != 0) { + return absl::ErrnoToStatus(error, + absl::StrCat("Failed to read the file ", name, + ". Error code: ", error)); } -}; + return absl::OkStatus(); +} +} // namespace absl::StatusOr> -GetImportPathToCrateNameMap(const std::string &mapping_file_path) { +GetImportPathToCrateNameMap(const absl::string_view mapping_file_path) { absl::flat_hash_map mapping; std::string mapping_contents; absl::Status status = - File::ReadFileToString(mapping_file_path, &mapping_contents, true); + ReadFileToString(mapping_file_path, &mapping_contents, true); if (!status.ok()) { return status; } @@ -177,97 +169,76 @@ GetImportPathToCrateNameMap(const std::string &mapping_file_path) { return mapping; } -/** - * Method generation abstraction. 
- * - * Each service contains a set of generic methods that will be used by codegen - * to generate abstraction implementations for the provided methods. - */ +// Method generation abstraction. +// +// Each service contains a set of generic methods that will be used by codegen +// to generate abstraction implementations for the provided methods. class Method { -private: - const MethodDescriptor *method_; - public: Method() = delete; - Method(const MethodDescriptor *method) : method_(method) {} + explicit Method(const MethodDescriptor *method) : method_(method) {} - /// The name of the method in Rust style. - std::string name() const { + // The name of the method in Rust style. + std::string Name() const { return rust::RsSafeName(rust::CamelToSnakeCase(method_->name())); }; - /// The fully-qualified name of the method, scope delimited by periods. - absl::string_view full_name() const { return method_->full_name(); } + // The fully-qualified name of the method, scope delimited by periods. + absl::string_view FullName() const { return method_->full_name(); } - /// The name of the method as it appears in the .proto file. - absl::string_view proto_field_name() const { return method_->name(); }; + // The name of the method as it appears in the .proto file. + absl::string_view ProtoFieldName() const { return method_->name(); }; - /// Checks if the method is streamed by the client. - bool is_client_streaming() const { return method_->client_streaming(); }; + // Checks if the method is streamed by the client. + bool IsClientStreaming() const { return method_->client_streaming(); }; - /// Checks if the method is streamed by the server. - bool is_server_streaming() const { return method_->server_streaming(); }; + // Checks if the method is streamed by the server. + bool IsServerStreaming() const { return method_->server_streaming(); }; - /// Get comments about this method. - std::string comment() const { return GrpcGetCommentsForDescriptor(method_); }; + // Get comments about this method. + std::string Comment() const { return GrpcGetCommentsForDescriptor(method_); }; - /// Checks if the method is deprecated. Default is false. - bool is_deprecated() const { return method_->options().deprecated(); } + // Checks if the method is deprecated. Default is false. + bool IsDeprecated() const { return method_->options().deprecated(); } - /** - * Type name of request. - * @param proto_path The path to the proto file, for context. - * @return A string representing the qualified name for the generated request - * struct. - */ - std::string request_name(const GrpcOpts &opts, int depth) const { + // Returns the Rust type name of request message. + std::string RequestName(const GrpcOpts &opts, int depth) const { const Descriptor *input = method_->input_type(); return RsTypePath(*input, opts, depth); }; - /** - * Type name of response. - * @param proto_path The path to the proto file, for context. - * @return A string representing the qualified name for the generated response - * struct. - */ - std::string response_name(const GrpcOpts &opts, int depth) const { + // Returns the Rust type name of response message. + std::string ResponseName(const GrpcOpts &opts, int depth) const { const Descriptor *output = method_->output_type(); return RsTypePath(*output, opts, depth); }; -}; -/** - * Service generation abstraction. - * - * This class is an interface that can be implemented and consumed - * by client and server generators to allow any codegen module - * to generate service abstractions. 
- */ -class Service { private: - const ServiceDescriptor *service_; + const MethodDescriptor *method_; +}; +// Service generation abstraction. +// +// This class is an interface that can be implemented and consumed +// by client and server generators to allow any codegen module +// to generate service abstractions. +class Service { public: Service() = delete; - Service(const ServiceDescriptor *service) : service_(service) {} + explicit Service(const ServiceDescriptor *service) : service_(service) {} - /// The name of the service, not including its containing scope. - std::string name() const { + // The name of the service, not including its containing scope. + std::string Name() const { return rust::RsSafeName(rust::SnakeToUpperCamelCase(service_->name())); }; - /// The fully-qualified name of the service, scope delimited by periods. - absl::string_view full_name() const { return service_->full_name(); }; + // The fully-qualified name of the service, scope delimited by periods. + absl::string_view FullName() const { return service_->full_name(); }; - /** - * Methods provided by the service. - * @return A span of non-owning pointers to the Method objects. The Service - * implementation is expected to manage the lifetime of these objects. - */ - std::vector methods() const { + // Returns a list of Methods provided by the service. + std::vector Methods() const { std::vector ret; int methods_count = service_->method_count(); ret.reserve(methods_count); @@ -277,23 +248,20 @@ class Service { return ret; }; - /// Get comments about this service. - virtual std::string comment() const { + // Get comments about this service. + virtual std::string Comment() const { return GrpcGetCommentsForDescriptor(service_); }; + +private: + const ServiceDescriptor *service_; }; -/** - * @brief Formats the full path for a method call. - * @param service The service containing the method. - * @param method The method to format the path for. - * @param emit_package If true, the service name will include its package. - * @return The formatted method path (e.g., "/package.MyService/MyMethod"). - */ +// Formats the full path for a method call. Returns the formatted method path +// (e.g., "/package.MyService/MyMethod") static std::string FormatMethodPath(const Service &service, const Method &method) { - return absl::StrFormat("/%s/%s", service.full_name(), - method.proto_field_name()); + return absl::StrFormat("/%s/%s", service.FullName(), method.ProtoFieldName()); } static std::string SanitizeForRustDoc(absl::string_view raw_comment) { @@ -317,8 +285,8 @@ static std::string SanitizeForRustDoc(absl::string_view raw_comment) { static std::string ProtoCommentToRustDoc(absl::string_view proto_comment) { std::string rust_doc; - std::vector lines = absl::StrSplit(proto_comment, '\n'); - for (const std::string &line : lines) { + std::vector lines = absl::StrSplit(proto_comment, '\n'); + for (const absl::string_view &line : lines) { // Preserve empty lines. 
if (line.empty()) { rust_doc += ("///\n"); @@ -335,7 +303,7 @@ namespace client { static void GenerateMethods(Printer &printer, const Service &service, const GrpcOpts &opts) { - static std::string unary_format = R"rs( + static const std::string unary_format = R"rs( pub async fn $ident$( &mut self, request: impl tonic::IntoRequest<$request$>, @@ -351,7 +319,7 @@ static void GenerateMethods(Printer &printer, const Service &service, } )rs"; - static std::string server_streaming_format = R"rs( + static const std::string server_streaming_format = R"rs( pub async fn $ident$( &mut self, request: impl tonic::IntoRequest<$request$>, @@ -367,7 +335,7 @@ static void GenerateMethods(Printer &printer, const Service &service, } )rs"; - static std::string client_streaming_format = R"rs( + static const std::string client_streaming_format = R"rs( pub async fn $ident$( &mut self, request: impl tonic::IntoStreamingRequest @@ -383,7 +351,7 @@ static void GenerateMethods(Printer &printer, const Service &service, } )rs"; - static std::string streaming_format = R"rs( + static const std::string streaming_format = R"rs( pub async fn $ident$( &mut self, request: impl tonic::IntoStreamingRequest @@ -399,31 +367,29 @@ static void GenerateMethods(Printer &printer, const Service &service, } )rs"; - const std::vector methods = service.methods(); + const std::vector methods = service.Methods(); for (const Method &method : methods) { - printer.Emit(ProtoCommentToRustDoc(method.comment())); - if (method.is_deprecated()) { + printer.Emit(ProtoCommentToRustDoc(method.Comment())); + if (method.IsDeprecated()) { GenerateDeprecated(printer); } - const std::string request_type = method.request_name(opts, 1); - const std::string response_type = method.response_name(opts, 1); + const std::string request_type = method.RequestName(opts, 1); + const std::string response_type = method.ResponseName(opts, 1); { auto vars = printer.WithVars({{"codec_name", "tonic_protobuf::ProtoCodec"}, - {"ident", method.name()}, + {"ident", method.Name()}, {"request", request_type}, {"response", response_type}, - {"service_name", service.full_name()}, + {"service_name", service.FullName()}, {"path", FormatMethodPath(service, method)}, - {"method_name", method.proto_field_name()}}); + {"method_name", method.ProtoFieldName()}}); - if (!method.is_client_streaming() && !method.is_server_streaming()) { + if (!method.IsClientStreaming() && !method.IsServerStreaming()) { printer.Emit(unary_format); - } else if (!method.is_client_streaming() && - method.is_server_streaming()) { + } else if (!method.IsClientStreaming() && method.IsServerStreaming()) { printer.Emit(server_streaming_format); - } else if (method.is_client_streaming() && - !method.is_server_streaming()) { + } else if (method.IsClientStreaming() && !method.IsServerStreaming()) { printer.Emit(client_streaming_format); } else { printer.Emit(streaming_format); @@ -437,15 +403,15 @@ static void GenerateMethods(Printer &printer, const Service &service, static void generate_client(const Service &service, Printer &printer, const GrpcOpts &opts) { - std::string service_ident = absl::StrFormat("%sClient", service.name()); + std::string service_ident = absl::StrFormat("%sClient", service.Name()); std::string client_mod = - absl::StrFormat("%s_client", rust::CamelToSnakeCase(service.name())); + absl::StrFormat("%s_client", rust::CamelToSnakeCase(service.Name())); printer.Emit( { {"client_mod", client_mod}, {"service_ident", service_ident}, {"service_doc", - [&] { 
printer.Emit(ProtoCommentToRustDoc(service.comment())); }}, + [&] { printer.Emit(ProtoCommentToRustDoc(service.Comment())); }}, {"methods", [&] { GenerateMethods(printer, service, opts); }}, }, R"rs( @@ -548,16 +514,10 @@ static void generate_client(const Service &service, Printer &printer, } // namespace client -namespace server {} // namespace server - -// Writes the generated service interface into the given -// ZeroCopyOutputStream. void GenerateService(protobuf::io::Printer &printer, const ServiceDescriptor *service_desc, const GrpcOpts &opts) { - const Service service = Service(service_desc); - - client::generate_client(service, printer, opts); + client::generate_client(Service(service_desc), printer, opts); } std::string GetRsGrpcFile(const protobuf::FileDescriptor &file) { diff --git a/protoc-gen-rust-grpc/src/grpc_rust_generator.h b/protoc-gen-rust-grpc/src/grpc_rust_generator.h index 2f7b591f3..93787f7a8 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_generator.h +++ b/protoc-gen-rust-grpc/src/grpc_rust_generator.h @@ -1,58 +1,59 @@ -/* - * - * Copyright 2025 gRPC authors. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - */ +// Copyright 2025 gRPC authors. +// +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +// IN THE SOFTWARE. 
-#ifndef NET_GRPC_COMPILER_RUST_GENERATOR_H_ -#define NET_GRPC_COMPILER_RUST_GENERATOR_H_ +#ifndef PROTOC_GEN_RUST_GRPC_GRPC_RUST_GENERATOR_H_ +#define PROTOC_GEN_RUST_GRPC_GRPC_RUST_GENERATOR_H_ #include "absl/log/absl_log.h" -#include -#include -#include +#include "google/protobuf/descriptor.h" namespace rust_grpc_generator { -namespace impl { -namespace protobuf = google::protobuf; -} // namespace impl - class GrpcOpts { public: - /// Path of the module containing the generated message code. Defaults to - /// "self", i.e. the message code and service code are present in the same - /// module. - std::string message_module_path = "self"; - absl::flat_hash_map import_path_to_crate_name = {}; - std::vector files_in_current_crate = - {}; + void SetMessageModulePath(const std::string path) { + message_module_path_ = std::move(path); + } - absl::string_view get_crate_name(absl::string_view import_path) const { - auto it = import_path_to_crate_name.find(import_path); - if (it == import_path_to_crate_name.end()) { + const std::string &GetMessageModulePath() const { + return message_module_path_; + } + + void SetImportPathToCrateName( + const absl::flat_hash_map mapping) { + import_path_to_crate_name_ = std::move(mapping); + } + + void SetFilesInCurrentCrate( + const std::vector files) { + files_in_current_crate_ = std::move(files); + } + + absl::string_view GetCrateName(absl::string_view import_path) const { + auto it = import_path_to_crate_name_.find(import_path); + if (it == import_path_to_crate_name_.end()) { ABSL_LOG(ERROR) << "Path " << import_path << " not found in crate mapping. Crate mapping contains " - << import_path_to_crate_name.size() << " entries:"; - for (const auto &entry : import_path_to_crate_name) { + << import_path_to_crate_name_.size() << " entries:"; + for (const auto &entry : import_path_to_crate_name_) { ABSL_LOG(ERROR) << " " << entry.first << " : " << entry.second << "\n"; } ABSL_LOG(FATAL) << "Cannot continue with missing crate mapping."; @@ -60,19 +61,28 @@ class GrpcOpts { return it->second; } - bool is_file_in_current_crate(const impl::protobuf::FileDescriptor &f) const { - return std::find(files_in_current_crate.begin(), - files_in_current_crate.end(), - &f) != files_in_current_crate.end(); + bool IsFileInCurrentCrate(const google::protobuf::FileDescriptor &f) const { + return std::find(files_in_current_crate_.begin(), + files_in_current_crate_.end(), + &f) != files_in_current_crate_.end(); } + +private: + // Path of the module containing the generated message code. Defaults to + // "self", i.e. the message code and service code are present in the same + // module. + std::string message_module_path_ = "self"; + absl::flat_hash_map import_path_to_crate_name_ = {}; + std::vector + files_in_current_crate_ = {}; }; // Writes the generated service interface into the given ZeroCopyOutputStream -void GenerateService(impl::protobuf::io::Printer &printer, - const impl::protobuf::ServiceDescriptor *service, +void GenerateService(google::protobuf::io::Printer &printer, + const google::protobuf::ServiceDescriptor *service, const GrpcOpts &opts); -std::string GetRsGrpcFile(const impl::protobuf::FileDescriptor &file); +std::string GetRsGrpcFile(const google::protobuf::FileDescriptor &file); // Returns a map from import path of a .proto file to the name of the crate // covering that file. @@ -86,7 +96,7 @@ std::string GetRsGrpcFile(const impl::protobuf::FileDescriptor &file); // ... 
// > -GetImportPathToCrateNameMap(const std::string &mapping_file_path); +GetImportPathToCrateNameMap(const absl::string_view mapping_file_path); } // namespace rust_grpc_generator -#endif // NET_GRPC_COMPILER_RUST_GENERATOR_H_ +#endif // PROTOC_GEN_RUST_GRPC_GRPC_RUST_GENERATOR_H_ diff --git a/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc b/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc index 0d6bbf878..5fb0a6921 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc +++ b/protoc-gen-rust-grpc/src/grpc_rust_plugin.cc @@ -1,35 +1,31 @@ -/* - * - * Copyright 2025 gRPC authors. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - */ +// Copyright 2025 gRPC authors. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +// IN THE SOFTWARE. 
-#include "grpc_rust_generator.h" -#include -#include -#include -#include -#include #include +#include "google/protobuf/compiler/code_generator.h" +#include "google/protobuf/compiler/plugin.h" +#include "google/protobuf/io/printer.h" + +#include "grpc_rust_generator.h" + namespace protobuf = google::protobuf; class RustGrpcGenerator : public protobuf::compiler::CodeGenerator { @@ -64,23 +60,25 @@ class RustGrpcGenerator : public protobuf::compiler::CodeGenerator { protobuf::compiler::ParseGeneratorParameter(parameter, &options); rust_grpc_generator::GrpcOpts grpc_opts; - absl::StatusOr> - import_path_to_crate_name; for (auto opt : options) { if (opt.first == "message_module_path") { - grpc_opts.message_module_path = opt.second; + grpc_opts.SetMessageModulePath(opt.second); } else if (opt.first == "crate_mapping") { - absl::StatusOr status = - rust_grpc_generator::GetImportPathToCrateNameMap(opt.second); - if (!status.ok()) { - *error = std::string(status.status().message()); + absl::StatusOr> + crate_map = + rust_grpc_generator::GetImportPathToCrateNameMap(opt.second); + if (crate_map.ok()) { + grpc_opts.SetImportPathToCrateName(std::move(*crate_map)); + } else { + *error = std::string(crate_map.status().message()); return false; } - import_path_to_crate_name = status.value(); } } - context->ListParsedFiles(&grpc_opts.files_in_current_crate); + std::vector files; + context->ListParsedFiles(&files); + grpc_opts.SetFilesInCurrentCrate(std::move(files)); auto outfile = absl::WrapUnique( context->Open(rust_grpc_generator::GetRsGrpcFile(*file))); From cd77988908444cfa392508ad025ef730bff5f22a Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Mon, 14 Jul 2025 16:15:34 +0530 Subject: [PATCH 18/26] Format generated code during build --- tonic-protobuf-build/Cargo.toml | 2 ++ tonic-protobuf-build/src/lib.rs | 50 +++++++++++++++++++++++++++++---- 2 files changed, 47 insertions(+), 5 deletions(-) diff --git a/tonic-protobuf-build/Cargo.toml b/tonic-protobuf-build/Cargo.toml index 2806b6d49..4364a1122 100644 --- a/tonic-protobuf-build/Cargo.toml +++ b/tonic-protobuf-build/Cargo.toml @@ -6,4 +6,6 @@ authors = ["gRPC Authors"] license = "MIT" [dependencies] +prettyplease = "0.2.35" protobuf-codegen = { version = "4.31.1-release" } +syn = "2.0.104" diff --git a/tonic-protobuf-build/src/lib.rs b/tonic-protobuf-build/src/lib.rs index 7e1bd0aea..4d1553c0d 100644 --- a/tonic-protobuf-build/src/lib.rs +++ b/tonic-protobuf-build/src/lib.rs @@ -22,11 +22,11 @@ * */ +use std::fs::{self, read_to_string}; use std::io::Write; -use std::{ - fs::File, - path::{Path, PathBuf}, -}; +use std::path::{Path, PathBuf}; + +use syn::parse_file; /// Details about a crate containing proto files with symbols refferenced in /// the file being compiled currently. @@ -64,6 +64,7 @@ pub struct CodeGen { message_module_path: Option, // Whether to generate message code, defaults to true. generate_message_code: bool, + should_format_code: bool, } impl CodeGen { @@ -75,6 +76,7 @@ impl CodeGen { dependencies: Vec::new(), message_module_path: None, generate_message_code: true, + should_format_code: true, } } @@ -98,6 +100,12 @@ impl CodeGen { self } + /// Enables or disables formatting of generated code. + pub fn should_format_code(&mut self, enable: bool) -> &mut Self { + self.should_format_code = enable; + self + } + /// Sets the directory for the files generated by protoc. The generated code /// will be present in a subdirectory corresponding to the path of the /// proto file withing the included directories. 
@@ -202,12 +210,44 @@ impl CodeGen { println!("{}", std::str::from_utf8(&output.stdout).unwrap()); eprintln!("{}", std::str::from_utf8(&output.stderr).unwrap()); assert!(output.status.success()); + + if self.should_format_code { + self.format_code(); + } Ok(()) } + fn format_code(&self) { + let mut generated_file_paths = Vec::new(); + let output_dir = &self.output_dir; + if self.generate_message_code { + generated_file_paths.push(output_dir.join("generated.rs")); + } + for proto_path in &self.inputs { + let Some(stem) = proto_path.file_stem().and_then(|s| s.to_str()) else { + continue; + }; + generated_file_paths.push(output_dir.join(format!("{}_grpc.pb.rs", stem))); + if self.generate_message_code { + generated_file_paths.push(output_dir.join(format!("{}.u.pb.rs", stem))); + } + } + + for path in &generated_file_paths { + // The path may not exist if there are no services present in the + // proto file. + if path.exists() { + let src = read_to_string(path).expect("Failed to read generated file"); + let syntax = parse_file(&src).unwrap(); + let formatted = prettyplease::unparse(&syntax); + fs::write(path, formatted).unwrap(); + } + } + } + fn generate_crate_mapping_file(&self) -> PathBuf { let crate_mapping_path = self.output_dir.join("crate_mapping.txt"); - let mut file = File::create(crate_mapping_path.clone()).unwrap(); + let mut file = fs::File::create(crate_mapping_path.clone()).unwrap(); for dep in &self.dependencies { file.write_all(format!("{}\n", dep.crate_name).as_bytes()) .unwrap(); From 9f06a59f50e7a2aaff01206187c58e6273ea5ccc Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Thu, 17 Jul 2025 19:03:21 +0530 Subject: [PATCH 19/26] Address review --- .github/workflows/CI.yml | 4 +++ grpc/Cargo.toml | 1 - grpc/src/macros.rs | 8 ++--- interop/build.rs | 2 +- tonic-protobuf-build/Cargo.toml | 3 +- tonic-protobuf-build/README.md | 19 +++++----- tonic-protobuf-build/src/lib.rs | 62 +++++++++++++++++++++++++++++---- tonic-protobuf/Cargo.toml | 3 +- tonic-protobuf/src/lib.rs | 4 +++ 9 files changed, 83 insertions(+), 23 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index c3125697e..4be749c08 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -52,6 +52,10 @@ jobs: # Share repository cache between workflows. repository-cache: true module-root: ./protoc-gen-rust-grpc + # Building the protoc plugin from scratch takes 6–14 minutes, depending on + # the OS. This delays the execution of workflows that use the plugin in + # build.rs files. We try to avoid rebuilding the plugin if it hasn't + # changed. - name: Build protoc plugin if: steps.cache-plugin.outputs.cache-hit != 'true' working-directory: ./protoc-gen-rust-grpc diff --git a/grpc/Cargo.toml b/grpc/Cargo.toml index f57666b5b..8bb573c02 100644 --- a/grpc/Cargo.toml +++ b/grpc/Cargo.toml @@ -28,5 +28,4 @@ dns = ["dep:hickory-resolver"] allowed_external_types = [ "tonic::*", "futures_core::stream::Stream", - "protobuf::codegen_traits::Message", ] diff --git a/grpc/src/macros.rs b/grpc/src/macros.rs index efbbe92dd..4bc087826 100644 --- a/grpc/src/macros.rs +++ b/grpc/src/macros.rs @@ -51,10 +51,10 @@ /// ```rust,ignore /// mod protos { /// // Include message code. -/// include!("/protobuf/directory/protos/generated.rs"); +/// include!("relative/protobuf/directory/generated.rs"); /// /// // Include service code. 
-/// include!("/protobuf/directory/protos/helloworld_grpc.pb.rs"); +/// include!("relative/protobuf/directory/helloworld_grpc.pb.rs"); /// } /// ``` /// @@ -64,12 +64,12 @@ /// ```rust,ignore /// mod protos { /// // Include message code. -/// include!("/protobuf/directory/protos/generated.rs"); +/// include!("relative/protobuf/directory/generated.rs"); /// } /// /// mod grpc { /// // Include service code. -/// include!("/protobuf/directory/proto/helloworld_grpc.pb.rs"); +/// include!("relative/protobuf/directory/helloworld_grpc.pb.rs"); /// } /// ``` /// diff --git a/interop/build.rs b/interop/build.rs index 133896f02..26881e083 100644 --- a/interop/build.rs +++ b/interop/build.rs @@ -5,7 +5,7 @@ fn main() { tonic_protobuf_build::CodeGen::new() .include("proto/grpc/testing") .inputs(["test.proto", "empty.proto", "messages.proto"]) - .generate_and_compile() + .compile() .unwrap(); // prevent needing to rebuild if files (or deps) haven't changed diff --git a/tonic-protobuf-build/Cargo.toml b/tonic-protobuf-build/Cargo.toml index 4364a1122..744da8d61 100644 --- a/tonic-protobuf-build/Cargo.toml +++ b/tonic-protobuf-build/Cargo.toml @@ -1,9 +1,10 @@ [package] name = "tonic-protobuf-build" -version = "0.9.0-alpha.1" +version = "0.14.0" edition = "2021" authors = ["gRPC Authors"] license = "MIT" +publish = false [dependencies] prettyplease = "0.2.35" diff --git a/tonic-protobuf-build/README.md b/tonic-protobuf-build/README.md index e47870eaa..04fb2c2d0 100644 --- a/tonic-protobuf-build/README.md +++ b/tonic-protobuf-build/README.md @@ -11,6 +11,7 @@ Required dependencies [dependencies] tonic = "" protobuf = "" +tonic-protobuf = "" [build-dependencies] tonic-protobuf-build = "" @@ -31,7 +32,7 @@ fn main() -> Result<(), Box> { tonic_protobuf_build::CodeGen::new() .include("proto") .inputs(["service.proto"]) - .generate_and_compile()?; + .compile()?; Ok(()) } ``` @@ -40,18 +41,20 @@ Or configure the generated code deeper via ```rust,no_run fn main() -> Result<(), Box> { - tonic_protobuf_build::configure() + let dependency = tonic_protobuf_build::Dependency::builder() + .crate_name("external_protos".to_string()) + .proto_import_paths(vec![PathBuf::from("external/message.proto")]) + .proto_files(vec!["message.proto".to_string()]) + .build()?; + + tonic_protobuf_build::CodeGen::new() .generate_message_code(false) .inputs(["proto/helloworld/helloworld.proto"]) .include("external") .message_module_path("super::proto") - .dependencies(vec![tonic_protobuf_build::Dependency { - crate_name: "external_protos".to_string(), - proto_import_paths: vec![PathBuf::from("external/message.proto")], - proto_files: vec!["message.proto".to_string()], - }]) + .dependencies(vec![dependency]) //.out_dir("src/generated") // you can change the generated code's location - .generate_and_compile()?; + .compile()?; Ok(()) } ``` diff --git a/tonic-protobuf-build/src/lib.rs b/tonic-protobuf-build/src/lib.rs index 4d1553c0d..b3a54b69f 100644 --- a/tonic-protobuf-build/src/lib.rs +++ b/tonic-protobuf-build/src/lib.rs @@ -28,17 +28,66 @@ use std::path::{Path, PathBuf}; use syn::parse_file; -/// Details about a crate containing proto files with symbols refferenced in +/// Details about a crate containing proto files with symbols referenced in /// the file being compiled currently. 
#[derive(Debug, Clone)] pub struct Dependency { + crate_name: String, + proto_import_paths: Vec, + proto_files: Vec, +} + +impl Dependency { + pub fn builder() -> DependencyBuilder { + DependencyBuilder::default() + } +} + +#[derive(Default, Debug)] +pub struct DependencyBuilder { + crate_name: Option, + proto_import_paths: Vec, + proto_files: Vec, +} + +impl DependencyBuilder { /// Name of the external crate. - pub crate_name: String, + pub fn crate_name(mut self, name: impl Into) -> Self { + self.crate_name = Some(name.into()); + self + } + /// List of paths .proto files whose codegen is present in the crate. This /// is used to re-run the build command if required. - pub proto_import_paths: Vec, + pub fn proto_import_path(mut self, path: impl Into) -> Self { + self.proto_import_paths.push(path.into()); + self + } + /// List of .proto file names whose codegen is present in the crate. - pub proto_files: Vec, + pub fn proto_import_paths(mut self, paths: Vec) -> Self { + self.proto_import_paths = paths; + self + } + + pub fn proto_file(mut self, file: impl Into) -> Self { + self.proto_files.push(file.into()); + self + } + + pub fn proto_files(mut self, files: Vec) -> Self { + self.proto_files = files; + self + } + + pub fn build(self) -> Result { + let crate_name = self.crate_name.ok_or("crate_name is required")?; + Ok(Dependency { + crate_name, + proto_import_paths: self.proto_import_paths, + proto_files: self.proto_files, + }) + } } impl From<&Dependency> for protobuf_codegen::Dependency { @@ -46,8 +95,7 @@ impl From<&Dependency> for protobuf_codegen::Dependency { protobuf_codegen::Dependency { crate_name: val.crate_name.clone(), proto_import_paths: val.proto_import_paths.clone(), - // TODO: Is this useful to expose the following field? It's not used - // by protobuf codegen. + // The following field is not used by protobuf codegen. c_include_paths: Vec::new(), proto_files: val.proto_files.clone(), } @@ -146,7 +194,7 @@ impl CodeGen { self } - pub fn generate_and_compile(&self) -> Result<(), String> { + pub fn compile(&self) -> Result<(), String> { // Generate the message code. if self.generate_message_code { protobuf_codegen::CodeGen::new() diff --git a/tonic-protobuf/Cargo.toml b/tonic-protobuf/Cargo.toml index b830fa198..cdfe675be 100644 --- a/tonic-protobuf/Cargo.toml +++ b/tonic-protobuf/Cargo.toml @@ -1,9 +1,10 @@ [package] name = "tonic-protobuf" -version = "0.9.0-alpha.1" +version = "0.14.0" edition = "2021" authors = ["gRPC Authors"] license = "MIT" +publish = false [dependencies] tonic = { version = "0.14.0", path = "../tonic", default-features = false, features = ["codegen"] } diff --git a/tonic-protobuf/src/lib.rs b/tonic-protobuf/src/lib.rs index 84b645cb8..4de4c1177 100644 --- a/tonic-protobuf/src/lib.rs +++ b/tonic-protobuf/src/lib.rs @@ -81,6 +81,10 @@ impl Encoder for ProtoEncoder { type Error = Status; fn encode(&mut self, item: Self::Item, buf: &mut EncodeBuf<'_>) -> Result<(), Self::Error> { + // The protobuf library doesn't support serializing into a user-provided + // buffer. Instead, it allocates its own buffer, resulting in an extra + // copy and allocation. + // TODO: Find a way to avoid this extra copy. 
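+        // Concretely: `serialize()` below returns its own buffer, and
+        // `put_slice` then copies those bytes into tonic's `EncodeBuf`.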
let serialized = item.serialize().map_err(from_decode_error)?; buf.put_slice(serialized.as_slice()); Ok(()) From 5cd495c22a0ee6f98aacce0e06dc01f3bf818b40 Mon Sep 17 00:00:00 2001 From: Arjan Singh Bal <46515553+arjan-bal@users.noreply.github.com> Date: Thu, 17 Jul 2025 19:12:11 +0530 Subject: [PATCH 20/26] Apply suggestions from code review Co-authored-by: Lucio Franco --- interop/Cargo.toml | 2 +- tonic-protobuf/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/interop/Cargo.toml b/interop/Cargo.toml index a91786fa0..f6c69ca14 100644 --- a/interop/Cargo.toml +++ b/interop/Cargo.toml @@ -30,7 +30,7 @@ tonic = {path = "../tonic", features = ["tls-ring"]} tower = "0.5" tracing-subscriber = {version = "0.3"} grpc = {path = "../grpc"} -protobuf = { version = "4.31.1-release"} +protobuf = { version = "4.31.1-release" } tonic-protobuf = {path = "../tonic-protobuf"} [build-dependencies] diff --git a/tonic-protobuf/Cargo.toml b/tonic-protobuf/Cargo.toml index cdfe675be..c573f1062 100644 --- a/tonic-protobuf/Cargo.toml +++ b/tonic-protobuf/Cargo.toml @@ -8,7 +8,7 @@ publish = false [dependencies] tonic = { version = "0.14.0", path = "../tonic", default-features = false, features = ["codegen"] } -bytes = "1.10.1" +bytes = "1" protobuf = { version = "4.31.1-release" } [package.metadata.cargo_check_external_types] From c3e03f96773483db988536ab26781f9ed5614d79 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Thu, 17 Jul 2025 23:08:59 +0530 Subject: [PATCH 21/26] Combine client binaries, use flag for codec --- interop/Cargo.toml | 4 - interop/src/bin/client.rs | 70 +++-- interop/src/bin/client_new_codegen.rs | 169 ---------- interop/src/client.rs | 414 +------------------------ interop/src/client_new_codegen.rs | 422 ------------------------- interop/src/client_prost.rs | 419 +++++++++++++++++++++++++ interop/src/client_protobuf.rs | 429 ++++++++++++++++++++++++++ interop/src/lib.rs | 3 +- interop/test.sh | 6 +- 9 files changed, 918 insertions(+), 1018 deletions(-) delete mode 100644 interop/src/bin/client_new_codegen.rs delete mode 100644 interop/src/client_new_codegen.rs create mode 100644 interop/src/client_prost.rs create mode 100644 interop/src/client_protobuf.rs diff --git a/interop/Cargo.toml b/interop/Cargo.toml index f6c69ca14..88c7355cc 100644 --- a/interop/Cargo.toml +++ b/interop/Cargo.toml @@ -12,10 +12,6 @@ path = "src/bin/client.rs" name = "server" path = "src/bin/server.rs" -[[bin]] -name = "client_new_codegen" -path = "src/bin/client_new_codegen.rs" - [dependencies] async-stream = "0.3" strum = {version = "0.27", features = ["derive"]} diff --git a/interop/src/bin/client.rs b/interop/src/bin/client.rs index 01c279200..8a6b4af2d 100644 --- a/interop/src/bin/client.rs +++ b/interop/src/bin/client.rs @@ -1,4 +1,5 @@ -use interop::client; +use interop::client::{InteropTest, InteropTestUnimplemented}; +use interop::{client_prost, client_protobuf}; use std::{str::FromStr, time::Duration}; use tonic::transport::Endpoint; use tonic::transport::{Certificate, ClientTlsConfig}; @@ -7,6 +8,25 @@ use tonic::transport::{Certificate, ClientTlsConfig}; struct Opts { use_tls: bool, test_case: Vec, + codec: Codec, +} + +#[derive(Debug)] +enum Codec { + Prost, + Protobuf, +} + +impl FromStr for Codec { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "prost" => Ok(Codec::Prost), + "protobuf" => Ok(Codec::Protobuf), + _ => Err(format!("Invalid codec: {}", s)), + } + } } impl Opts { @@ -17,6 +37,7 @@ impl Opts { test_case: pargs.value_from_fn("--test_case", 
|test_case| { test_case.split(',').map(Testcase::from_str).collect() })?, + codec: pargs.value_from_str("--codec")?, }) } } @@ -48,8 +69,19 @@ async fn main() -> Result<(), Box> { let channel = endpoint.connect().await?; - let mut client = client::TestClient::new(channel.clone()); - let mut unimplemented_client = client::UnimplementedClient::new(channel); + let (mut client, mut unimplemented_client): ( + Box, + Box, + ) = match matches.codec { + Codec::Prost => ( + Box::new(client_prost::TestClient::new(channel.clone())), + Box::new(client_prost::UnimplementedClient::new(channel)), + ), + Codec::Protobuf => ( + Box::new(client_protobuf::TestClient::new(channel.clone())), + Box::new(client_protobuf::UnimplementedClient::new(channel)), + ), + }; let mut failures = Vec::new(); @@ -58,31 +90,25 @@ async fn main() -> Result<(), Box> { let mut test_results = Vec::new(); match test_case { - Testcase::EmptyUnary => client::empty_unary(&mut client, &mut test_results).await, - Testcase::LargeUnary => client::large_unary(&mut client, &mut test_results).await, - Testcase::ClientStreaming => { - client::client_streaming(&mut client, &mut test_results).await - } - Testcase::ServerStreaming => { - client::server_streaming(&mut client, &mut test_results).await - } - Testcase::PingPong => client::ping_pong(&mut client, &mut test_results).await, - Testcase::EmptyStream => client::empty_stream(&mut client, &mut test_results).await, + Testcase::EmptyUnary => client.empty_unary(&mut test_results).await, + Testcase::LargeUnary => client.large_unary(&mut test_results).await, + Testcase::ClientStreaming => client.client_streaming(&mut test_results).await, + Testcase::ServerStreaming => client.server_streaming(&mut test_results).await, + Testcase::PingPong => client.ping_pong(&mut test_results).await, + Testcase::EmptyStream => client.empty_stream(&mut test_results).await, Testcase::StatusCodeAndMessage => { - client::status_code_and_message(&mut client, &mut test_results).await + client.status_code_and_message(&mut test_results).await } Testcase::SpecialStatusMessage => { - client::special_status_message(&mut client, &mut test_results).await - } - Testcase::UnimplementedMethod => { - client::unimplemented_method(&mut client, &mut test_results).await + client.special_status_message(&mut test_results).await } + Testcase::UnimplementedMethod => client.unimplemented_method(&mut test_results).await, Testcase::UnimplementedService => { - client::unimplemented_service(&mut unimplemented_client, &mut test_results).await - } - Testcase::CustomMetadata => { - client::custom_metadata(&mut client, &mut test_results).await + unimplemented_client + .unimplemented_service(&mut test_results) + .await } + Testcase::CustomMetadata => client.custom_metadata(&mut test_results).await, _ => unimplemented!(), } diff --git a/interop/src/bin/client_new_codegen.rs b/interop/src/bin/client_new_codegen.rs deleted file mode 100644 index 99b8f700b..000000000 --- a/interop/src/bin/client_new_codegen.rs +++ /dev/null @@ -1,169 +0,0 @@ -/* - * - * Copyright 2025 gRPC authors. 
- * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - */ - -use interop::client_new_codegen; -use std::{str::FromStr, time::Duration}; -use tonic::transport::Endpoint; -use tonic::transport::{Certificate, ClientTlsConfig}; - -#[derive(Debug)] -struct Opts { - use_tls: bool, - test_case: Vec, -} - -impl Opts { - fn parse() -> Result { - let mut pargs = pico_args::Arguments::from_env(); - Ok(Self { - use_tls: pargs.contains("--use_tls"), - test_case: pargs.value_from_fn("--test_case", |test_case| { - test_case.split(',').map(Testcase::from_str).collect() - })?, - }) - } -} - -#[tokio::main] -async fn main() -> Result<(), Box> { - interop::trace_init(); - - let matches = Opts::parse()?; - - let test_cases = matches.test_case; - - let scheme = if matches.use_tls { "https" } else { "http" }; - - #[allow(unused_mut)] - let mut endpoint = Endpoint::try_from(format!("{}://localhost:10000", scheme))? 
- .timeout(Duration::from_secs(5)) - .concurrency_limit(30); - - if matches.use_tls { - let pem = std::fs::read_to_string("interop/data/ca.pem")?; - let ca = Certificate::from_pem(pem); - endpoint = endpoint.tls_config( - ClientTlsConfig::new() - .ca_certificate(ca) - .domain_name("foo.test.google.fr"), - )?; - } - - let channel = endpoint.connect().await?; - - let mut client = client_new_codegen::TestClient::new(channel.clone()); - let mut unimplemented_client = client_new_codegen::UnimplementedClient::new(channel); - - let mut failures = Vec::new(); - - for test_case in test_cases { - println!("{:?}:", test_case); - let mut test_results = Vec::new(); - - match test_case { - Testcase::EmptyUnary => { - client_new_codegen::empty_unary(&mut client, &mut test_results).await - } - Testcase::LargeUnary => { - client_new_codegen::large_unary(&mut client, &mut test_results).await - } - Testcase::ClientStreaming => { - client_new_codegen::client_streaming(&mut client, &mut test_results).await - } - Testcase::ServerStreaming => { - client_new_codegen::server_streaming(&mut client, &mut test_results).await - } - Testcase::PingPong => { - client_new_codegen::ping_pong(&mut client, &mut test_results).await - } - Testcase::EmptyStream => { - client_new_codegen::empty_stream(&mut client, &mut test_results).await - } - Testcase::StatusCodeAndMessage => { - client_new_codegen::status_code_and_message(&mut client, &mut test_results).await - } - Testcase::SpecialStatusMessage => { - client_new_codegen::special_status_message(&mut client, &mut test_results).await - } - Testcase::UnimplementedMethod => { - client_new_codegen::unimplemented_method(&mut client, &mut test_results).await - } - Testcase::UnimplementedService => { - client_new_codegen::unimplemented_service( - &mut unimplemented_client, - &mut test_results, - ) - .await - } - Testcase::CustomMetadata => { - client_new_codegen::custom_metadata(&mut client, &mut test_results).await - } - _ => unimplemented!(), - } - - for result in test_results { - println!(" {}", result); - - if result.is_failed() { - failures.push(result); - } - } - } - - if !failures.is_empty() { - println!("{} tests failed", failures.len()); - std::process::exit(1); - } - - Ok(()) -} - -#[derive(Debug, strum::EnumString)] -#[strum(serialize_all = "snake_case")] -enum Testcase { - EmptyUnary, - CacheableUnary, - LargeUnary, - ClientCompressedUnary, - ServerCompressedUnary, - ClientStreaming, - ClientCompressedStreaming, - ServerStreaming, - ServerCompressedStreaming, - PingPong, - EmptyStream, - ComputeEngineCreds, - JwtTokenCreds, - Oauth2AuthToken, - PerRpcCreds, - CustomMetadata, - StatusCodeAndMessage, - SpecialStatusMessage, - UnimplementedMethod, - UnimplementedService, - CancelAfterBegin, - CancelAfterFirstResponse, - TimeoutOnSleepingServer, - ConcurrentLargeUnary, -} diff --git a/interop/src/client.rs b/interop/src/client.rs index 389264684..1e448d652 100644 --- a/interop/src/client.rs +++ b/interop/src/client.rs @@ -1,410 +1,30 @@ -use crate::{ - pb::test_service_client::*, pb::unimplemented_service_client::*, pb::*, test_assert, - TestAssertion, -}; -use tokio::sync::mpsc; -use tokio_stream::StreamExt; -use tonic::transport::Channel; -use tonic::{metadata::MetadataValue, Code, Request, Response, Status}; +use crate::TestAssertion; +use tonic::async_trait; -pub type TestClient = TestServiceClient; -pub type UnimplementedClient = UnimplementedServiceClient; +#[async_trait] +pub trait InteropTest: Send { + async fn empty_unary(&mut self, assertions: &mut Vec); -const 
LARGE_REQ_SIZE: usize = 271_828; -const LARGE_RSP_SIZE: i32 = 314_159; -const REQUEST_LENGTHS: &[i32] = &[27182, 8, 1828, 45904]; -const RESPONSE_LENGTHS: &[i32] = &[31415, 9, 2653, 58979]; -const TEST_STATUS_MESSAGE: &str = "test status message"; -const SPECIAL_TEST_STATUS_MESSAGE: &str = - "\t\ntest with whitespace\r\nand Unicode BMP ☺ and non-BMP 😈\t\n"; + async fn large_unary(&mut self, assertions: &mut Vec); -pub async fn empty_unary(client: &mut TestClient, assertions: &mut Vec) { - let result = client.empty_call(Request::new(Empty {})).await; + async fn client_streaming(&mut self, assertions: &mut Vec); - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); + async fn server_streaming(&mut self, assertions: &mut Vec); - if let Ok(response) = result { - let body = response.into_inner(); - assertions.push(test_assert!( - "body must not be null", - body == Empty {}, - format!("body={:?}", body) - )); - } -} - -pub async fn large_unary(client: &mut TestClient, assertions: &mut Vec) { - use std::mem; - let payload = crate::client_payload(LARGE_REQ_SIZE); - let req = SimpleRequest { - response_type: PayloadType::Compressable as i32, - response_size: LARGE_RSP_SIZE, - payload: Some(payload), - ..Default::default() - }; - - let result = client.unary_call(Request::new(req)).await; - - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(response) = result { - let body = response.into_inner(); - let payload_len = body.payload.as_ref().map(|p| p.body.len()).unwrap_or(0); - - assertions.push(test_assert!( - "body must be 314159 bytes", - payload_len == LARGE_RSP_SIZE as usize, - format!("mem::size_of_val(&body)={:?}", mem::size_of_val(&body)) - )); - } -} - -// pub async fn cachable_unary(client: &mut Client, assertions: &mut Vec) { -// let payload = Payload { -// r#type: PayloadType::Compressable as i32, -// body: format!("{:?}", std::time::Instant::now()).into_bytes(), -// }; -// let req = SimpleRequest { -// response_type: PayloadType::Compressable as i32, -// payload: Some(payload), -// ..Default::default() -// }; - -// client. 
-// } - -pub async fn client_streaming(client: &mut TestClient, assertions: &mut Vec) { - let requests = REQUEST_LENGTHS.iter().map(|len| StreamingInputCallRequest { - payload: Some(crate::client_payload(*len as usize)), - ..Default::default() - }); - - let stream = tokio_stream::iter(requests); - - let result = client.streaming_input_call(Request::new(stream)).await; - - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(response) = result { - let body = response.into_inner(); - - assertions.push(test_assert!( - "aggregated payload size must be 74922 bytes", - body.aggregated_payload_size == 74922, - format!("aggregated_payload_size={:?}", body.aggregated_payload_size) - )); - } -} - -pub async fn server_streaming(client: &mut TestClient, assertions: &mut Vec) { - let req = StreamingOutputCallRequest { - response_parameters: RESPONSE_LENGTHS - .iter() - .map(|len| ResponseParameters::with_size(*len)) - .collect(), - ..Default::default() - }; - let req = Request::new(req); - - let result = client.streaming_output_call(req).await; - - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(response) = result { - let responses = response - .into_inner() - .filter_map(|m| m.ok()) - .collect::>() - .await; - let actual_response_lengths = crate::response_lengths(&responses); - let asserts = vec![ - test_assert!( - "there should be four responses", - responses.len() == 4, - format!("responses.len()={:?}", responses.len()) - ), - test_assert!( - "the response payload sizes should match input", - RESPONSE_LENGTHS == actual_response_lengths.as_slice(), - format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths) - ), - ]; - - assertions.extend(asserts); - } -} - -pub async fn ping_pong(client: &mut TestClient, assertions: &mut Vec) { - let (tx, rx) = mpsc::unbounded_channel(); - tx.send(make_ping_pong_request(0)).unwrap(); - - let result = client - .full_duplex_call(Request::new( - tokio_stream::wrappers::UnboundedReceiverStream::new(rx), - )) - .await; - - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(mut response) = result.map(Response::into_inner) { - let mut responses = Vec::new(); - - loop { - match response.next().await { - Some(result) => { - responses.push(result.unwrap()); - if responses.len() == REQUEST_LENGTHS.len() { - drop(tx); - break; - } else { - tx.send(make_ping_pong_request(responses.len())).unwrap(); - } - } - None => { - assertions.push(TestAssertion::Failed { - description: - "server should keep the stream open until the client closes it", - expression: "Stream terminated unexpectedly early", - why: None, - }); - break; - } - } - } - - let actual_response_lengths = crate::response_lengths(&responses); - assertions.push(test_assert!( - "there should be four responses", - responses.len() == RESPONSE_LENGTHS.len(), - format!("{:?}={:?}", responses.len(), RESPONSE_LENGTHS.len()) - )); - assertions.push(test_assert!( - "the response payload sizes should match input", - RESPONSE_LENGTHS == actual_response_lengths.as_slice(), - format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths) - )); - } -} - -pub async fn empty_stream(client: &mut TestClient, assertions: &mut Vec) { - let stream = tokio_stream::empty(); - let result = client.full_duplex_call(Request::new(stream)).await; - - assertions.push(test_assert!( - "call must be successful", 
- result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(response) = result.map(Response::into_inner) { - let responses = response.collect::>().await; - - assertions.push(test_assert!( - "there should be no responses", - responses.is_empty(), - format!("responses.len()={:?}", responses.len()) - )); - } -} - -pub async fn status_code_and_message(client: &mut TestClient, assertions: &mut Vec) { - fn validate_response(result: Result, assertions: &mut Vec) - where - T: std::fmt::Debug, - { - assertions.push(test_assert!( - "call must fail with unknown status code", - match &result { - Err(status) => status.code() == Code::Unknown, - _ => false, - }, - format!("result={:?}", result) - )); - - assertions.push(test_assert!( - "call must respsond with expected status message", - match &result { - Err(status) => status.message() == TEST_STATUS_MESSAGE, - _ => false, - }, - format!("result={:?}", result) - )); - } - - let simple_req = SimpleRequest { - response_status: Some(EchoStatus { - code: 2, - message: TEST_STATUS_MESSAGE.to_string(), - }), - ..Default::default() - }; - - let duplex_req = StreamingOutputCallRequest { - response_status: Some(EchoStatus { - code: 2, - message: TEST_STATUS_MESSAGE.to_string(), - }), - ..Default::default() - }; - - let result = client.unary_call(Request::new(simple_req)).await; - validate_response(result, assertions); - - let stream = tokio_stream::once(duplex_req); - let result = match client.full_duplex_call(Request::new(stream)).await { - Ok(response) => { - let stream = response.into_inner(); - let responses = stream.collect::>().await; - Ok(responses) - } - Err(e) => Err(e), - }; - - validate_response(result, assertions); -} - -pub async fn special_status_message(client: &mut TestClient, assertions: &mut Vec) { - let req = SimpleRequest { - response_status: Some(EchoStatus { - code: 2, - message: SPECIAL_TEST_STATUS_MESSAGE.to_string(), - }), - ..Default::default() - }; - - let result = client.unary_call(Request::new(req)).await; - - assertions.push(test_assert!( - "call must fail with unknown status code", - match &result { - Err(status) => status.code() == Code::Unknown, - _ => false, - }, - format!("result={:?}", result) - )); - - assertions.push(test_assert!( - "call must respsond with expected status message", - match &result { - Err(status) => status.message() == SPECIAL_TEST_STATUS_MESSAGE, - _ => false, - }, - format!("result={:?}", result) - )); -} - -pub async fn unimplemented_method(client: &mut TestClient, assertions: &mut Vec) { - let result = client.unimplemented_call(Request::new(Empty {})).await; - assertions.push(test_assert!( - "call must fail with unimplemented status code", - match &result { - Err(status) => status.code() == Code::Unimplemented, - _ => false, - }, - format!("result={:?}", result) - )); -} - -pub async fn unimplemented_service( - client: &mut UnimplementedClient, - assertions: &mut Vec, -) { - let result = client.unimplemented_call(Request::new(Empty {})).await; - assertions.push(test_assert!( - "call must fail with unimplemented status code", - match &result { - Err(status) => status.code() == Code::Unimplemented, - _ => false, - }, - format!("result={:?}", result) - )); -} - -pub async fn custom_metadata(client: &mut TestClient, assertions: &mut Vec) { - let key1 = "x-grpc-test-echo-initial"; - let value1: MetadataValue<_> = "test_initial_metadata_value".parse().unwrap(); - let key2 = "x-grpc-test-echo-trailing-bin"; - let value2 = MetadataValue::from_bytes(&[0xab, 0xab, 0xab]); - - let req = SimpleRequest { 
- response_type: PayloadType::Compressable as i32, - response_size: LARGE_RSP_SIZE, - payload: Some(crate::client_payload(LARGE_REQ_SIZE)), - ..Default::default() - }; - let mut req_unary = Request::new(req); - req_unary.metadata_mut().insert(key1, value1.clone()); - req_unary.metadata_mut().insert_bin(key2, value2.clone()); - - let stream = tokio_stream::once(make_ping_pong_request(0)); - let mut req_stream = Request::new(stream); - req_stream.metadata_mut().insert(key1, value1.clone()); - req_stream.metadata_mut().insert_bin(key2, value2.clone()); - - let response = client - .unary_call(req_unary) - .await - .expect("call should pass."); - - assertions.push(test_assert!( - "metadata string must match in unary", - response.metadata().get(key1) == Some(&value1), - format!("result={:?}", response.metadata().get(key1)) - )); - assertions.push(test_assert!( - "metadata bin must match in unary", - response.metadata().get_bin(key2) == Some(&value2), - format!("result={:?}", response.metadata().get_bin(key1)) - )); + async fn ping_pong(&mut self, assertions: &mut Vec); - let response = client - .full_duplex_call(req_stream) - .await - .expect("call should pass."); + async fn empty_stream(&mut self, assertions: &mut Vec); - assertions.push(test_assert!( - "metadata string must match in unary", - response.metadata().get(key1) == Some(&value1), - format!("result={:?}", response.metadata().get(key1)) - )); + async fn status_code_and_message(&mut self, assertions: &mut Vec); - let mut stream = response.into_inner(); + async fn special_status_message(&mut self, assertions: &mut Vec); - let trailers = stream.trailers().await.unwrap().unwrap(); + async fn unimplemented_method(&mut self, assertions: &mut Vec); - assertions.push(test_assert!( - "metadata bin must match in unary", - trailers.get_bin(key2) == Some(&value2), - format!("result={:?}", trailers.get_bin(key1)) - )); + async fn custom_metadata(&mut self, assertions: &mut Vec); } -fn make_ping_pong_request(idx: usize) -> StreamingOutputCallRequest { - let req_len = REQUEST_LENGTHS[idx]; - let resp_len = RESPONSE_LENGTHS[idx]; - StreamingOutputCallRequest { - response_parameters: vec![ResponseParameters::with_size(resp_len)], - payload: Some(crate::client_payload(req_len as usize)), - ..Default::default() - } +#[async_trait] +pub trait InteropTestUnimplemented: Send { + async fn unimplemented_service(&mut self, assertions: &mut Vec); } diff --git a/interop/src/client_new_codegen.rs b/interop/src/client_new_codegen.rs deleted file mode 100644 index 6823a0c5c..000000000 --- a/interop/src/client_new_codegen.rs +++ /dev/null @@ -1,422 +0,0 @@ -/* - * - * Copyright 2025 gRPC authors. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - */ - -use crate::{ - grpc_pb::test_service_client::*, grpc_pb::unimplemented_service_client::*, grpc_pb::*, - test_assert, TestAssertion, -}; -use protobuf::__internal::MatcherEq; -use protobuf::proto; -use tokio::sync::mpsc; -use tokio_stream::StreamExt; -use tonic::transport::Channel; -use tonic::{metadata::MetadataValue, Code, Request, Response, Status}; - -pub type TestClient = TestServiceClient; -pub type UnimplementedClient = UnimplementedServiceClient; - -const LARGE_REQ_SIZE: usize = 271_828; -const LARGE_RSP_SIZE: i32 = 314_159; -const REQUEST_LENGTHS: &[i32] = &[27182, 8, 1828, 45904]; -const RESPONSE_LENGTHS: &[i32] = &[31415, 9, 2653, 58979]; -const TEST_STATUS_MESSAGE: &str = "test status message"; -const SPECIAL_TEST_STATUS_MESSAGE: &str = - "\t\ntest with whitespace\r\nand Unicode BMP ☺ and non-BMP 😈\t\n"; - -pub async fn empty_unary(client: &mut TestClient, assertions: &mut Vec) { - let result = client.empty_call(Request::new(Empty::default())).await; - - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(response) = result { - let body = response.into_inner(); - assertions.push(test_assert!( - "body must not be null", - body.matches(&Empty::default()), - format!("body={:?}", body) - )); - } -} - -pub async fn large_unary(client: &mut TestClient, assertions: &mut Vec) { - use std::mem; - let payload = crate::grpc_utils::client_payload(LARGE_REQ_SIZE); - let req = proto!(SimpleRequest { - response_type: PayloadType::Compressable, - response_size: LARGE_RSP_SIZE, - payload: payload, - }); - - let result = client.unary_call(Request::new(req)).await; - - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(response) = result { - let body = response.into_inner(); - let payload_len = body.payload().body().len(); - - assertions.push(test_assert!( - "body must be 314159 bytes", - payload_len == LARGE_RSP_SIZE as usize, - format!("mem::size_of_val(&body)={:?}", mem::size_of_val(&body)) - )); - } -} - -pub async fn client_streaming(client: &mut TestClient, assertions: &mut Vec) { - let requests = REQUEST_LENGTHS.iter().map(|len| { - proto!(StreamingInputCallRequest { - payload: crate::grpc_utils::client_payload(*len as usize), - }) - }); - - let stream = tokio_stream::iter(requests); - - let result = client.streaming_input_call(Request::new(stream)).await; - - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(response) = result { - let body = response.into_inner(); - - assertions.push(test_assert!( - "aggregated payload size must be 74922 bytes", - body.aggregated_payload_size() == 74922, - format!( - "aggregated_payload_size={:?}", - body.aggregated_payload_size() - ) - )); - } -} - -pub async fn server_streaming(client: &mut TestClient, assertions: &mut Vec) { - let req = proto!(StreamingOutputCallRequest { - response_parameters: RESPONSE_LENGTHS - .iter() - .map(|len| ResponseParameters::with_size(*len)), - }); - let req = Request::new(req); - - let result = client.streaming_output_call(req).await; - - assertions.push(test_assert!( - "call must be successful", - 
result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(response) = result { - let responses = response - .into_inner() - .filter_map(|m| m.ok()) - .collect::>() - .await; - let actual_response_lengths = crate::grpc_utils::response_lengths(&responses); - let asserts = vec![ - test_assert!( - "there should be four responses", - responses.len() == 4, - format!("responses.len()={:?}", responses.len()) - ), - test_assert!( - "the response payload sizes should match input", - RESPONSE_LENGTHS == actual_response_lengths.as_slice(), - format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths) - ), - ]; - - assertions.extend(asserts); - } -} - -pub async fn ping_pong(client: &mut TestClient, assertions: &mut Vec) { - let (tx, rx) = mpsc::unbounded_channel(); - tx.send(make_ping_pong_request(0)).unwrap(); - - let result = client - .full_duplex_call(Request::new( - tokio_stream::wrappers::UnboundedReceiverStream::new(rx), - )) - .await; - - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(mut response) = result.map(Response::into_inner) { - let mut responses = Vec::new(); - - loop { - match response.next().await { - Some(result) => { - responses.push(result.unwrap()); - if responses.len() == REQUEST_LENGTHS.len() { - drop(tx); - break; - } else { - tx.send(make_ping_pong_request(responses.len())).unwrap(); - } - } - None => { - assertions.push(TestAssertion::Failed { - description: - "server should keep the stream open until the client closes it", - expression: "Stream terminated unexpectedly early", - why: None, - }); - break; - } - } - } - - let actual_response_lengths = crate::grpc_utils::response_lengths(&responses); - assertions.push(test_assert!( - "there should be four responses", - responses.len() == RESPONSE_LENGTHS.len(), - format!("{:?}={:?}", responses.len(), RESPONSE_LENGTHS.len()) - )); - assertions.push(test_assert!( - "the response payload sizes should match input", - RESPONSE_LENGTHS == actual_response_lengths.as_slice(), - format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths) - )); - } -} - -pub async fn empty_stream(client: &mut TestClient, assertions: &mut Vec) { - let stream = tokio_stream::empty(); - let result = client.full_duplex_call(Request::new(stream)).await; - - assertions.push(test_assert!( - "call must be successful", - result.is_ok(), - format!("result={:?}", result) - )); - - if let Ok(response) = result.map(Response::into_inner) { - let responses = response.collect::>().await; - - assertions.push(test_assert!( - "there should be no responses", - responses.is_empty(), - format!("responses.len()={:?}", responses.len()) - )); - } -} - -pub async fn status_code_and_message(client: &mut TestClient, assertions: &mut Vec) { - fn validate_response(result: Result, assertions: &mut Vec) - where - T: std::fmt::Debug, - { - assertions.push(test_assert!( - "call must fail with unknown status code", - match &result { - Err(status) => status.code() == Code::Unknown, - _ => false, - }, - format!("result={:?}", result) - )); - - assertions.push(test_assert!( - "call must respsond with expected status message", - match &result { - Err(status) => status.message() == TEST_STATUS_MESSAGE, - _ => false, - }, - format!("result={:?}", result) - )); - } - - let simple_req = proto!(SimpleRequest { - response_status: EchoStatus { - code: 2, - message: TEST_STATUS_MESSAGE.to_string(), - }, - }); - - let duplex_req = proto!(StreamingOutputCallRequest { - response_status: EchoStatus { - 
code: 2, - message: TEST_STATUS_MESSAGE.to_string(), - }, - }); - - let result = client.unary_call(Request::new(simple_req)).await; - validate_response(result, assertions); - - let stream = tokio_stream::once(duplex_req); - let result = match client.full_duplex_call(Request::new(stream)).await { - Ok(response) => { - let stream = response.into_inner(); - let responses = stream.collect::>().await; - Ok(responses) - } - Err(e) => Err(e), - }; - - validate_response(result, assertions); -} - -pub async fn special_status_message(client: &mut TestClient, assertions: &mut Vec) { - let req = proto!(SimpleRequest { - response_status: EchoStatus { - code: 2, - message: SPECIAL_TEST_STATUS_MESSAGE.to_string(), - }, - }); - - let result = client.unary_call(Request::new(req)).await; - - assertions.push(test_assert!( - "call must fail with unknown status code", - match &result { - Err(status) => status.code() == Code::Unknown, - _ => false, - }, - format!("result={:?}", result) - )); - - assertions.push(test_assert!( - "call must respsond with expected status message", - match &result { - Err(status) => status.message() == SPECIAL_TEST_STATUS_MESSAGE, - _ => false, - }, - format!("result={:?}", result) - )); -} - -pub async fn unimplemented_method(client: &mut TestClient, assertions: &mut Vec) { - let result = client - .unimplemented_call(Request::new(Empty::default())) - .await; - assertions.push(test_assert!( - "call must fail with unimplemented status code", - match &result { - Err(status) => status.code() == Code::Unimplemented, - _ => false, - }, - format!("result={:?}", result) - )); -} - -pub async fn unimplemented_service( - client: &mut UnimplementedClient, - assertions: &mut Vec, -) { - let result = client - .unimplemented_call(Request::new(Empty::default())) - .await; - assertions.push(test_assert!( - "call must fail with unimplemented status code", - match &result { - Err(status) => status.code() == Code::Unimplemented, - _ => false, - }, - format!("result={:?}", result) - )); -} - -pub async fn custom_metadata(client: &mut TestClient, assertions: &mut Vec) { - let key1 = "x-grpc-test-echo-initial"; - let value1: MetadataValue<_> = "test_initial_metadata_value".parse().unwrap(); - let key2 = "x-grpc-test-echo-trailing-bin"; - let value2 = MetadataValue::from_bytes(&[0xab, 0xab, 0xab]); - - let req = proto!(SimpleRequest { - response_type: PayloadType::Compressable, - response_size: LARGE_RSP_SIZE, - payload: crate::grpc_utils::client_payload(LARGE_REQ_SIZE), - }); - let mut req_unary = Request::new(req); - req_unary.metadata_mut().insert(key1, value1.clone()); - req_unary.metadata_mut().insert_bin(key2, value2.clone()); - - let stream = tokio_stream::once(make_ping_pong_request(0)); - let mut req_stream = Request::new(stream); - req_stream.metadata_mut().insert(key1, value1.clone()); - req_stream.metadata_mut().insert_bin(key2, value2.clone()); - - let response = client - .unary_call(req_unary) - .await - .expect("call should pass."); - - assertions.push(test_assert!( - "metadata string must match in unary", - response.metadata().get(key1) == Some(&value1), - format!("result={:?}", response.metadata().get(key1)) - )); - assertions.push(test_assert!( - "metadata bin must match in unary", - response.metadata().get_bin(key2) == Some(&value2), - format!("result={:?}", response.metadata().get_bin(key1)) - )); - - let response = client - .full_duplex_call(req_stream) - .await - .expect("call should pass."); - - assertions.push(test_assert!( - "metadata string must match in unary", - 
response.metadata().get(key1) == Some(&value1), - format!("result={:?}", response.metadata().get(key1)) - )); - - let mut stream = response.into_inner(); - - let trailers = stream.trailers().await.unwrap().unwrap(); - - assertions.push(test_assert!( - "metadata bin must match in unary", - trailers.get_bin(key2) == Some(&value2), - format!("result={:?}", trailers.get_bin(key1)) - )); -} - -fn make_ping_pong_request(idx: usize) -> StreamingOutputCallRequest { - let req_len = REQUEST_LENGTHS[idx]; - let resp_len = RESPONSE_LENGTHS[idx]; - proto!(StreamingOutputCallRequest { - response_parameters: std::iter::once(ResponseParameters::with_size(resp_len)), - payload: crate::grpc_utils::client_payload(req_len as usize), - }) -} diff --git a/interop/src/client_prost.rs b/interop/src/client_prost.rs new file mode 100644 index 000000000..50299b8a2 --- /dev/null +++ b/interop/src/client_prost.rs @@ -0,0 +1,419 @@ +use crate::client::{InteropTest, InteropTestUnimplemented}; +use crate::{ + pb::test_service_client::*, pb::unimplemented_service_client::*, pb::*, test_assert, + TestAssertion, +}; +use tokio::sync::mpsc; +use tokio_stream::StreamExt; +use tonic::async_trait; +use tonic::transport::Channel; +use tonic::{metadata::MetadataValue, Code, Request, Response, Status}; + +pub type TestClient = TestServiceClient; +pub type UnimplementedClient = UnimplementedServiceClient; + +const LARGE_REQ_SIZE: usize = 271_828; +const LARGE_RSP_SIZE: i32 = 314_159; +const REQUEST_LENGTHS: &[i32] = &[27182, 8, 1828, 45904]; +const RESPONSE_LENGTHS: &[i32] = &[31415, 9, 2653, 58979]; +const TEST_STATUS_MESSAGE: &str = "test status message"; +const SPECIAL_TEST_STATUS_MESSAGE: &str = + "\t\ntest with whitespace\r\nand Unicode BMP ☺ and non-BMP 😈\t\n"; + +#[async_trait] +impl InteropTest for TestClient { + async fn empty_unary(&mut self, assertions: &mut Vec) { + let result = self.empty_call(Request::new(Empty {})).await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result { + let body = response.into_inner(); + assertions.push(test_assert!( + "body must not be null", + body == Empty {}, + format!("body={:?}", body) + )); + } + } + + async fn large_unary(&mut self, assertions: &mut Vec) { + use std::mem; + let payload = crate::client_payload(LARGE_REQ_SIZE); + let req = SimpleRequest { + response_type: PayloadType::Compressable as i32, + response_size: LARGE_RSP_SIZE, + payload: Some(payload), + ..Default::default() + }; + + let result = self.unary_call(Request::new(req)).await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result { + let body = response.into_inner(); + let payload_len = body.payload.as_ref().map(|p| p.body.len()).unwrap_or(0); + + assertions.push(test_assert!( + "body must be 314159 bytes", + payload_len == LARGE_RSP_SIZE as usize, + format!("mem::size_of_val(&body)={:?}", mem::size_of_val(&body)) + )); + } + } + + // async fn cachable_unary(client: &mut Client, assertions: &mut Vec) { + // let payload = Payload { + // r#type: PayloadType::Compressable as i32, + // body: format!("{:?}", std::time::Instant::now()).into_bytes(), + // }; + // let req = SimpleRequest { + // response_type: PayloadType::Compressable as i32, + // payload: Some(payload), + // ..Default::default() + // }; + + // self. 
+ // } + + async fn client_streaming(&mut self, assertions: &mut Vec) { + let requests: Vec<_> = REQUEST_LENGTHS + .iter() + .map(make_streaming_input_request) + .collect(); + + let stream = tokio_stream::iter(requests); + + let result = self.streaming_input_call(Request::new(stream)).await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result { + let body = response.into_inner(); + + assertions.push(test_assert!( + "aggregated payload size must be 74922 bytes", + body.aggregated_payload_size == 74922, + format!("aggregated_payload_size={:?}", body.aggregated_payload_size) + )); + } + } + + async fn server_streaming(&mut self, assertions: &mut Vec) { + let req = StreamingOutputCallRequest { + response_parameters: RESPONSE_LENGTHS + .iter() + .map(|len| ResponseParameters::with_size(*len)) + .collect(), + ..Default::default() + }; + let req = Request::new(req); + + let result = self.streaming_output_call(req).await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result { + let responses = response + .into_inner() + .filter_map(|m| m.ok()) + .collect::>() + .await; + let actual_response_lengths = crate::response_lengths(&responses); + let asserts = vec![ + test_assert!( + "there should be four responses", + responses.len() == 4, + format!("responses.len()={:?}", responses.len()) + ), + test_assert!( + "the response payload sizes should match input", + RESPONSE_LENGTHS == actual_response_lengths.as_slice(), + format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths) + ), + ]; + + assertions.extend(asserts); + } + } + + async fn ping_pong(&mut self, assertions: &mut Vec) { + let (tx, rx) = mpsc::unbounded_channel(); + tx.send(make_ping_pong_request(0)).unwrap(); + + let result = self + .full_duplex_call(Request::new( + tokio_stream::wrappers::UnboundedReceiverStream::new(rx), + )) + .await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(mut response) = result.map(Response::into_inner) { + let mut responses = Vec::new(); + + loop { + match response.next().await { + Some(result) => { + responses.push(result.unwrap()); + if responses.len() == REQUEST_LENGTHS.len() { + drop(tx); + break; + } else { + tx.send(make_ping_pong_request(responses.len())).unwrap(); + } + } + None => { + assertions.push(TestAssertion::Failed { + description: + "server should keep the stream open until the client closes it", + expression: "Stream terminated unexpectedly early", + why: None, + }); + break; + } + } + } + + let actual_response_lengths = crate::response_lengths(&responses); + assertions.push(test_assert!( + "there should be four responses", + responses.len() == RESPONSE_LENGTHS.len(), + format!("{:?}={:?}", responses.len(), RESPONSE_LENGTHS.len()) + )); + assertions.push(test_assert!( + "the response payload sizes should match input", + RESPONSE_LENGTHS == actual_response_lengths.as_slice(), + format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths) + )); + } + } + + async fn empty_stream(&mut self, assertions: &mut Vec) { + let stream = tokio_stream::empty(); + let result = self.full_duplex_call(Request::new(stream)).await; + + assertions.push(test_assert!( + "call must be successful", + result.is_ok(), + format!("result={:?}", result) + )); + + if let Ok(response) = result.map(Response::into_inner) { + let 
responses = response.collect::>().await; + + assertions.push(test_assert!( + "there should be no responses", + responses.is_empty(), + format!("responses.len()={:?}", responses.len()) + )); + } + } + + async fn status_code_and_message(&mut self, assertions: &mut Vec) { + fn validate_response(result: Result, assertions: &mut Vec) + where + T: std::fmt::Debug, + { + assertions.push(test_assert!( + "call must fail with unknown status code", + match &result { + Err(status) => status.code() == Code::Unknown, + _ => false, + }, + format!("result={:?}", result) + )); + + assertions.push(test_assert!( + "call must respsond with expected status message", + match &result { + Err(status) => status.message() == TEST_STATUS_MESSAGE, + _ => false, + }, + format!("result={:?}", result) + )); + } + + let simple_req = SimpleRequest { + response_status: Some(EchoStatus { + code: 2, + message: TEST_STATUS_MESSAGE.to_string(), + }), + ..Default::default() + }; + + let duplex_req = StreamingOutputCallRequest { + response_status: Some(EchoStatus { + code: 2, + message: TEST_STATUS_MESSAGE.to_string(), + }), + ..Default::default() + }; + + let result = self.unary_call(Request::new(simple_req)).await; + validate_response(result, assertions); + + let stream = tokio_stream::once(duplex_req); + let result = match self.full_duplex_call(Request::new(stream)).await { + Ok(response) => { + let stream = response.into_inner(); + let responses = stream.collect::>().await; + Ok(responses) + } + Err(e) => Err(e), + }; + + validate_response(result, assertions); + } + + async fn special_status_message(&mut self, assertions: &mut Vec) { + let req = SimpleRequest { + response_status: Some(EchoStatus { + code: 2, + message: SPECIAL_TEST_STATUS_MESSAGE.to_string(), + }), + ..Default::default() + }; + + let result = self.unary_call(Request::new(req)).await; + + assertions.push(test_assert!( + "call must fail with unknown status code", + match &result { + Err(status) => status.code() == Code::Unknown, + _ => false, + }, + format!("result={:?}", result) + )); + + assertions.push(test_assert!( + "call must respsond with expected status message", + match &result { + Err(status) => status.message() == SPECIAL_TEST_STATUS_MESSAGE, + _ => false, + }, + format!("result={:?}", result) + )); + } + + async fn unimplemented_method(&mut self, assertions: &mut Vec) { + let result = self.unimplemented_call(Request::new(Empty {})).await; + assertions.push(test_assert!( + "call must fail with unimplemented status code", + match &result { + Err(status) => status.code() == Code::Unimplemented, + _ => false, + }, + format!("result={:?}", result) + )); + } + + async fn custom_metadata(&mut self, assertions: &mut Vec) { + let key1 = "x-grpc-test-echo-initial"; + let value1: MetadataValue<_> = "test_initial_metadata_value".parse().unwrap(); + let key2 = "x-grpc-test-echo-trailing-bin"; + let value2 = MetadataValue::from_bytes(&[0xab, 0xab, 0xab]); + + let req = SimpleRequest { + response_type: PayloadType::Compressable as i32, + response_size: LARGE_RSP_SIZE, + payload: Some(crate::client_payload(LARGE_REQ_SIZE)), + ..Default::default() + }; + let mut req_unary = Request::new(req); + req_unary.metadata_mut().insert(key1, value1.clone()); + req_unary.metadata_mut().insert_bin(key2, value2.clone()); + + let stream = tokio_stream::once(make_ping_pong_request(0)); + let mut req_stream = Request::new(stream); + req_stream.metadata_mut().insert(key1, value1.clone()); + req_stream.metadata_mut().insert_bin(key2, value2.clone()); + + let response = 
self.unary_call(req_unary).await.expect("call should pass."); + + assertions.push(test_assert!( + "metadata string must match in unary", + response.metadata().get(key1) == Some(&value1), + format!("result={:?}", response.metadata().get(key1)) + )); + assertions.push(test_assert!( + "metadata bin must match in unary", + response.metadata().get_bin(key2) == Some(&value2), + format!("result={:?}", response.metadata().get_bin(key1)) + )); + + let response = self + .full_duplex_call(req_stream) + .await + .expect("call should pass."); + + assertions.push(test_assert!( + "metadata string must match in unary", + response.metadata().get(key1) == Some(&value1), + format!("result={:?}", response.metadata().get(key1)) + )); + + let mut stream = response.into_inner(); + + let trailers = stream.trailers().await.unwrap().unwrap(); + + assertions.push(test_assert!( + "metadata bin must match in unary", + trailers.get_bin(key2) == Some(&value2), + format!("result={:?}", trailers.get_bin(key1)) + )); + } +} + +#[async_trait] +impl InteropTestUnimplemented for UnimplementedClient { + async fn unimplemented_service(&mut self, assertions: &mut Vec) { + let result = self.unimplemented_call(Request::new(Empty {})).await; + assertions.push(test_assert!( + "call must fail with unimplemented status code", + match &result { + Err(status) => status.code() == Code::Unimplemented, + _ => false, + }, + format!("result={:?}", result) + )); + } +} + +fn make_ping_pong_request(idx: usize) -> StreamingOutputCallRequest { + let req_len = REQUEST_LENGTHS[idx]; + let resp_len = RESPONSE_LENGTHS[idx]; + StreamingOutputCallRequest { + response_parameters: vec![ResponseParameters::with_size(resp_len)], + payload: Some(crate::client_payload(req_len as usize)), + ..Default::default() + } +} + +fn make_streaming_input_request(len: &i32) -> StreamingInputCallRequest { + StreamingInputCallRequest { + payload: Some(crate::client_payload(*len as usize)), + ..Default::default() + } +} diff --git a/interop/src/client_protobuf.rs b/interop/src/client_protobuf.rs new file mode 100644 index 000000000..456251e0d --- /dev/null +++ b/interop/src/client_protobuf.rs @@ -0,0 +1,429 @@ +/* + * + * Copyright 2025 gRPC authors. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. 
+ *
+ */
+
+use crate::client::{InteropTest, InteropTestUnimplemented};
+use crate::{
+    grpc_pb::test_service_client::*, grpc_pb::unimplemented_service_client::*, grpc_pb::*,
+    test_assert, TestAssertion,
+};
+use protobuf::__internal::MatcherEq;
+use protobuf::proto;
+use tokio::sync::mpsc;
+use tokio_stream::StreamExt;
+use tonic::async_trait;
+use tonic::transport::Channel;
+use tonic::{metadata::MetadataValue, Code, Request, Response, Status};
+
+pub type TestClient = TestServiceClient<Channel>;
+pub type UnimplementedClient = UnimplementedServiceClient<Channel>;
+
+const LARGE_REQ_SIZE: usize = 271_828;
+const LARGE_RSP_SIZE: i32 = 314_159;
+const REQUEST_LENGTHS: &[i32] = &[27182, 8, 1828, 45904];
+const RESPONSE_LENGTHS: &[i32] = &[31415, 9, 2653, 58979];
+const TEST_STATUS_MESSAGE: &str = "test status message";
+const SPECIAL_TEST_STATUS_MESSAGE: &str =
+    "\t\ntest with whitespace\r\nand Unicode BMP ☺ and non-BMP 😈\t\n";
+
+#[async_trait]
+impl InteropTest for TestClient {
+    async fn empty_unary(&mut self, assertions: &mut Vec<TestAssertion>) {
+        let result = self.empty_call(Request::new(Empty::default())).await;
+
+        assertions.push(test_assert!(
+            "call must be successful",
+            result.is_ok(),
+            format!("result={:?}", result)
+        ));
+
+        if let Ok(response) = result {
+            let body = response.into_inner();
+            assertions.push(test_assert!(
+                "body must not be null",
+                body.matches(&Empty::default()),
+                format!("body={:?}", body)
+            ));
+        }
+    }
+
+    async fn large_unary(&mut self, assertions: &mut Vec<TestAssertion>) {
+        use std::mem;
+        let payload = crate::grpc_utils::client_payload(LARGE_REQ_SIZE);
+        let req = proto!(SimpleRequest {
+            response_type: PayloadType::Compressable,
+            response_size: LARGE_RSP_SIZE,
+            payload: payload,
+        });
+
+        let result = self.unary_call(Request::new(req)).await;
+
+        assertions.push(test_assert!(
+            "call must be successful",
+            result.is_ok(),
+            format!("result={:?}", result)
+        ));
+
+        if let Ok(response) = result {
+            let body = response.into_inner();
+            let payload_len = body.payload().body().len();
+
+            assertions.push(test_assert!(
+                "body must be 314159 bytes",
+                payload_len == LARGE_RSP_SIZE as usize,
+                format!("mem::size_of_val(&body)={:?}", mem::size_of_val(&body))
+            ));
+        }
+    }
+
+    async fn client_streaming(&mut self, assertions: &mut Vec<TestAssertion>) {
+        let requests: Vec<_> = REQUEST_LENGTHS
+            .iter()
+            .map(make_streaming_input_request)
+            .collect();
+
+        let stream = tokio_stream::iter(requests);
+
+        let result = self.streaming_input_call(Request::new(stream)).await;
+
+        assertions.push(test_assert!(
+            "call must be successful",
+            result.is_ok(),
+            format!("result={:?}", result)
+        ));
+
+        if let Ok(response) = result {
+            let body = response.into_inner();
+
+            assertions.push(test_assert!(
+                "aggregated payload size must be 74922 bytes",
+                body.aggregated_payload_size() == 74922,
+                format!(
+                    "aggregated_payload_size={:?}",
+                    body.aggregated_payload_size()
+                )
+            ));
+        }
+    }
+
+    async fn server_streaming(&mut self, assertions: &mut Vec<TestAssertion>) {
+        let req = proto!(StreamingOutputCallRequest {
+            response_parameters: RESPONSE_LENGTHS
+                .iter()
+                .map(|len| ResponseParameters::with_size(*len)),
+        });
+        let req = Request::new(req);
+
+        let result = self.streaming_output_call(req).await;
+
+        assertions.push(test_assert!(
+            "call must be successful",
+            result.is_ok(),
+            format!("result={:?}", result)
+        ));
+
+        if let Ok(response) = result {
+            let responses = response
+                .into_inner()
+                .filter_map(|m| m.ok())
+                .collect::<Vec<_>>()
+                .await;
+            let actual_response_lengths = crate::grpc_utils::response_lengths(&responses);
+            let asserts = vec![
+                test_assert!(
+                    "there should be four responses",
+                    responses.len() == 4,
+                    format!("responses.len()={:?}", responses.len())
+                ),
+                test_assert!(
+                    "the response payload sizes should match input",
+                    RESPONSE_LENGTHS == actual_response_lengths.as_slice(),
+                    format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths)
+                ),
+            ];
+
+            assertions.extend(asserts);
+        }
+    }
+
+    async fn ping_pong(&mut self, assertions: &mut Vec<TestAssertion>) {
+        let (tx, rx) = mpsc::unbounded_channel();
+        tx.send(make_ping_pong_request(0)).unwrap();
+
+        let result = self
+            .full_duplex_call(Request::new(
+                tokio_stream::wrappers::UnboundedReceiverStream::new(rx),
+            ))
+            .await;
+
+        assertions.push(test_assert!(
+            "call must be successful",
+            result.is_ok(),
+            format!("result={:?}", result)
+        ));
+
+        if let Ok(mut response) = result.map(Response::into_inner) {
+            let mut responses = Vec::new();
+
+            loop {
+                match response.next().await {
+                    Some(result) => {
+                        responses.push(result.unwrap());
+                        if responses.len() == REQUEST_LENGTHS.len() {
+                            drop(tx);
+                            break;
+                        } else {
+                            tx.send(make_ping_pong_request(responses.len())).unwrap();
+                        }
+                    }
+                    None => {
+                        assertions.push(TestAssertion::Failed {
+                            description:
+                                "server should keep the stream open until the client closes it",
+                            expression: "Stream terminated unexpectedly early",
+                            why: None,
+                        });
+                        break;
+                    }
+                }
+            }
+
+            let actual_response_lengths = crate::grpc_utils::response_lengths(&responses);
+            assertions.push(test_assert!(
+                "there should be four responses",
+                responses.len() == RESPONSE_LENGTHS.len(),
+                format!("{:?}={:?}", responses.len(), RESPONSE_LENGTHS.len())
+            ));
+            assertions.push(test_assert!(
+                "the response payload sizes should match input",
+                RESPONSE_LENGTHS == actual_response_lengths.as_slice(),
+                format!("{:?}={:?}", RESPONSE_LENGTHS, actual_response_lengths)
+            ));
+        }
+    }
+
+    async fn empty_stream(&mut self, assertions: &mut Vec<TestAssertion>) {
+        let stream = tokio_stream::empty();
+        let result = self.full_duplex_call(Request::new(stream)).await;
+
+        assertions.push(test_assert!(
+            "call must be successful",
+            result.is_ok(),
+            format!("result={:?}", result)
+        ));
+
+        if let Ok(response) = result.map(Response::into_inner) {
+            let responses = response.collect::<Vec<_>>().await;
+
+            assertions.push(test_assert!(
+                "there should be no responses",
+                responses.is_empty(),
+                format!("responses.len()={:?}", responses.len())
+            ));
+        }
+    }
+
+    async fn status_code_and_message(&mut self, assertions: &mut Vec<TestAssertion>) {
+        fn validate_response<T>(result: Result<T, Status>, assertions: &mut Vec<TestAssertion>)
+        where
+            T: std::fmt::Debug,
+        {
+            assertions.push(test_assert!(
+                "call must fail with unknown status code",
+                match &result {
+                    Err(status) => status.code() == Code::Unknown,
+                    _ => false,
+                },
+                format!("result={:?}", result)
+            ));
+
+            assertions.push(test_assert!(
+                "call must respond with expected status message",
+                match &result {
+                    Err(status) => status.message() == TEST_STATUS_MESSAGE,
+                    _ => false,
+                },
+                format!("result={:?}", result)
+            ));
+        }
+
+        let simple_req = proto!(SimpleRequest {
+            response_status: EchoStatus {
+                code: 2,
+                message: TEST_STATUS_MESSAGE.to_string(),
+            },
+        });
+
+        let duplex_req = proto!(StreamingOutputCallRequest {
+            response_status: EchoStatus {
+                code: 2,
+                message: TEST_STATUS_MESSAGE.to_string(),
+            },
+        });
+
+        let result = self.unary_call(Request::new(simple_req)).await;
+        validate_response(result, assertions);
+
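+        // Repeat the check over a full-duplex stream: collecting the response
+        // stream should surface the same requested error status.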
+        let stream = tokio_stream::once(duplex_req);
+        let result = match self.full_duplex_call(Request::new(stream)).await {
+            Ok(response) => {
+                let stream = response.into_inner();
+                let responses = stream.collect::<Vec<_>>().await;
+                Ok(responses)
+            }
+            Err(e) => Err(e),
+        };
+
+        validate_response(result, assertions);
+    }
+
+    async fn special_status_message(&mut self, assertions: &mut Vec<TestAssertion>) {
+        let req = proto!(SimpleRequest {
+            response_status: EchoStatus {
+                code: 2,
+                message: SPECIAL_TEST_STATUS_MESSAGE.to_string(),
+            },
+        });
+
+        let result = self.unary_call(Request::new(req)).await;
+
+        assertions.push(test_assert!(
+            "call must fail with unknown status code",
+            match &result {
+                Err(status) => status.code() == Code::Unknown,
+                _ => false,
+            },
+            format!("result={:?}", result)
+        ));
+
+        assertions.push(test_assert!(
+            "call must respond with expected status message",
+            match &result {
+                Err(status) => status.message() == SPECIAL_TEST_STATUS_MESSAGE,
+                _ => false,
+            },
+            format!("result={:?}", result)
+        ));
+    }
+
+    async fn unimplemented_method(&mut self, assertions: &mut Vec<TestAssertion>) {
+        let result = self
+            .unimplemented_call(Request::new(Empty::default()))
+            .await;
+        assertions.push(test_assert!(
+            "call must fail with unimplemented status code",
+            match &result {
+                Err(status) => status.code() == Code::Unimplemented,
+                _ => false,
+            },
+            format!("result={:?}", result)
+        ));
+    }
+
+    async fn custom_metadata(&mut self, assertions: &mut Vec<TestAssertion>) {
+        let key1 = "x-grpc-test-echo-initial";
+        let value1: MetadataValue<_> = "test_initial_metadata_value".parse().unwrap();
+        let key2 = "x-grpc-test-echo-trailing-bin";
+        let value2 = MetadataValue::from_bytes(&[0xab, 0xab, 0xab]);
+
+        let req = proto!(SimpleRequest {
+            response_type: PayloadType::Compressable,
+            response_size: LARGE_RSP_SIZE,
+            payload: crate::grpc_utils::client_payload(LARGE_REQ_SIZE),
+        });
+        let mut req_unary = Request::new(req);
+        req_unary.metadata_mut().insert(key1, value1.clone());
+        req_unary.metadata_mut().insert_bin(key2, value2.clone());
+
+        let stream = tokio_stream::once(make_ping_pong_request(0));
+        let mut req_stream = Request::new(stream);
+        req_stream.metadata_mut().insert(key1, value1.clone());
+        req_stream.metadata_mut().insert_bin(key2, value2.clone());
+
+        let response = self.unary_call(req_unary).await.expect("call should pass.");
+
+        assertions.push(test_assert!(
+            "metadata string must match in unary",
+            response.metadata().get(key1) == Some(&value1),
+            format!("result={:?}", response.metadata().get(key1))
+        ));
+        assertions.push(test_assert!(
+            "metadata bin must match in unary",
+            response.metadata().get_bin(key2) == Some(&value2),
+            format!("result={:?}", response.metadata().get_bin(key2))
+        ));
+
+        let response = self
+            .full_duplex_call(req_stream)
+            .await
+            .expect("call should pass.");
+
+        assertions.push(test_assert!(
+            "metadata string must match in duplex",
+            response.metadata().get(key1) == Some(&value1),
+            format!("result={:?}", response.metadata().get(key1))
+        ));
+
+        let mut stream = response.into_inner();
+
+        let trailers = stream.trailers().await.unwrap().unwrap();
+
+        assertions.push(test_assert!(
+            "metadata bin must match in trailers",
+            trailers.get_bin(key2) == Some(&value2),
+            format!("result={:?}", trailers.get_bin(key2))
+        ));
+    }
+}
+
+#[async_trait]
+impl InteropTestUnimplemented for UnimplementedClient {
+    async fn unimplemented_service(&mut self, assertions: &mut Vec<TestAssertion>) {
+        let result = self
+            .unimplemented_call(Request::new(Empty::default()))
+            .await;
+        assertions.push(test_assert!(
+            "call must fail with unimplemented status code",
+            match &result {
+                Err(status) => status.code() == Code::Unimplemented,
+                _ => false,
+            },
format!("result={:?}", result) + )); + } +} + +fn make_ping_pong_request(idx: usize) -> StreamingOutputCallRequest { + let req_len = REQUEST_LENGTHS[idx]; + let resp_len = RESPONSE_LENGTHS[idx]; + proto!(StreamingOutputCallRequest { + response_parameters: std::iter::once(ResponseParameters::with_size(resp_len)), + payload: crate::grpc_utils::client_payload(req_len as usize), + }) +} + +fn make_streaming_input_request(len: &i32) -> StreamingInputCallRequest { + proto!(StreamingInputCallRequest { + payload: crate::grpc_utils::client_payload(*len as usize), + }) +} diff --git a/interop/src/lib.rs b/interop/src/lib.rs index d9507a555..a3dbdb627 100644 --- a/interop/src/lib.rs +++ b/interop/src/lib.rs @@ -1,7 +1,8 @@ #![recursion_limit = "256"] pub mod client; -pub mod client_new_codegen; +pub mod client_prost; +pub mod client_protobuf; pub mod server; pub mod pb { diff --git a/interop/test.sh b/interop/test.sh index 974afb2d9..4e5ff5933 100755 --- a/interop/test.sh +++ b/interop/test.sh @@ -57,10 +57,10 @@ trap 'echo ":; killing test server"; kill ${SERVER_PID};' EXIT sleep 1 -./target/debug/client --test_case="${JOINED_TEST_CASES}" "${ARG}" +./target/debug/client --codec=prost --test_case="${JOINED_TEST_CASES}" "${ARG}" # Test a grpc rust client against a Go server. -./target/debug/client_new_codegen --test_case="${JOINED_TEST_CASES}" ${ARG} +./target/debug/client --codec=protobuf --test_case="${JOINED_TEST_CASES}" ${ARG} echo ":; killing test server"; kill "${SERVER_PID}"; @@ -75,7 +75,7 @@ trap 'echo ":; killing test server"; kill ${SERVER_PID};' EXIT sleep 1 -./target/debug/client --test_case="${JOINED_TEST_CASES}" "${ARG}" +./target/debug/client --codec=prost --test_case="${JOINED_TEST_CASES}" "${ARG}" # Run client test cases if [ -n "${ARG:-}" ]; then From 726d22788abd6258592cd16ddaca1f7b21b58053 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Mon, 21 Jul 2025 23:25:55 +0530 Subject: [PATCH 22/26] Fix cpp function name casing --- protoc-gen-rust-grpc/src/grpc_rust_generator.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc index d1d05d476..d1c0561ec 100644 --- a/protoc-gen-rust-grpc/src/grpc_rust_generator.cc +++ b/protoc-gen-rust-grpc/src/grpc_rust_generator.cc @@ -401,8 +401,8 @@ static void GenerateMethods(Printer &printer, const Service &service, } } -static void generate_client(const Service &service, Printer &printer, - const GrpcOpts &opts) { +static void GenerateClient(const Service &service, Printer &printer, + const GrpcOpts &opts) { std::string service_ident = absl::StrFormat("%sClient", service.Name()); std::string client_mod = absl::StrFormat("%s_client", rust::CamelToSnakeCase(service.Name())); @@ -517,7 +517,7 @@ static void generate_client(const Service &service, Printer &printer, void GenerateService(protobuf::io::Printer &printer, const ServiceDescriptor *service_desc, const GrpcOpts &opts) { - client::generate_client(Service(service_desc), printer, opts); + client::GenerateClient(Service(service_desc), printer, opts); } std::string GetRsGrpcFile(const protobuf::FileDescriptor &file) { From d13eed51243b68199272a5cfe1674ad54b306274 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Wed, 23 Jul 2025 13:53:55 +0530 Subject: [PATCH 23/26] Address review --- grpc/src/macros.rs | 14 ++++++++++++++ interop/src/lib.rs | 2 +- tonic-protobuf/src/lib.rs | 2 +- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/grpc/src/macros.rs b/grpc/src/macros.rs 
index 4bc087826..aaf0bb942 100644 --- a/grpc/src/macros.rs +++ b/grpc/src/macros.rs @@ -43,6 +43,15 @@ /// - The output directory is set to the [`OUT_DIR`] environment variable. /// - The message path is set to `self`. /// +/// If your `.proto` files are not in a subdirectory, you can omit the first +/// parameter. +/// +/// ```rust,ignore +/// mod pb { +/// grpc::include_proto!("helloworld"); +/// } +/// ``` +/// /// If you have modified the output directory or message path, you should /// include the generated code manually instead of using this macro. /// @@ -76,6 +85,11 @@ /// [`OUT_DIR`]: https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts #[macro_export] macro_rules! include_proto { + // Assume the generated output dir is OUT_DIR. + ($proto_file:literal) => { + $crate::include_proto!("", $proto_file); + }; + ($parent_dir:literal, $proto_file:literal) => { include!(concat!(env!("OUT_DIR"), "/", $parent_dir, "/generated.rs")); include!(concat!( diff --git a/interop/src/lib.rs b/interop/src/lib.rs index a3dbdb627..239512534 100644 --- a/interop/src/lib.rs +++ b/interop/src/lib.rs @@ -12,7 +12,7 @@ pub mod pb { } pub mod grpc_pb { - grpc::include_proto!("", "test"); + grpc::include_proto!("test"); } use std::{default, fmt, iter}; diff --git a/tonic-protobuf/src/lib.rs b/tonic-protobuf/src/lib.rs index 4de4c1177..9914325ff 100644 --- a/tonic-protobuf/src/lib.rs +++ b/tonic-protobuf/src/lib.rs @@ -84,7 +84,7 @@ impl Encoder for ProtoEncoder { // The protobuf library doesn't support serializing into a user-provided // buffer. Instead, it allocates its own buffer, resulting in an extra // copy and allocation. - // TODO: Find a way to avoid this extra copy. + // TODO: #2345 - Find a way to avoid this extra copy. let serialized = item.serialize().map_err(from_decode_error)?; buf.put_slice(serialized.as_slice()); Ok(()) From c33f3805ea6b79a32887686c60ab0043d66ad72b Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Wed, 23 Jul 2025 14:42:55 +0530 Subject: [PATCH 24/26] Align tonic-* crate versions in README --- tonic-protobuf-build/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tonic-protobuf-build/README.md b/tonic-protobuf-build/README.md index 04fb2c2d0..3e3334e37 100644 --- a/tonic-protobuf-build/README.md +++ b/tonic-protobuf-build/README.md @@ -11,10 +11,10 @@ Required dependencies [dependencies] tonic = "" protobuf = "" -tonic-protobuf = "" +tonic-protobuf = "" [build-dependencies] -tonic-protobuf-build = "" +tonic-protobuf-build = "" ``` You must ensure you have the following programs in your PATH: From 00b6d63cf35d39e5078fcf254646013f74b9edd2 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Fri, 25 Jul 2025 15:16:41 +0530 Subject: [PATCH 25/26] Use include! 
instead of path --- tonic-protobuf-build/README.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tonic-protobuf-build/README.md b/tonic-protobuf-build/README.md index 3e3334e37..60ee13d77 100644 --- a/tonic-protobuf-build/README.md +++ b/tonic-protobuf-build/README.md @@ -85,13 +85,14 @@ you can uncomment the line `.output_dir(...)` above, and in your lib file config a mod like this: ```rust,ignore pub mod generated { - #[path = ""] pub mod helloworld { - #[path = "generated.rs"] - pub mod proto; + pub mod proto { + include!("helloworld/generated.rs"); + } - #[path = "test_grpc.pb.rs"] - pub mod grpc; + pub mod grpc { + include!("helloworld/test_grpc.pb.rs"); + } } } ``` From 4b150c1ee48940ce5bfa589f0b6024e48ff70ef0 Mon Sep 17 00:00:00 2001 From: Arjan Bal Date: Wed, 30 Jul 2025 01:00:24 +0530 Subject: [PATCH 26/26] Export protobuf from tonic-protobuf --- interop/Cargo.toml | 5 +++++ interop/src/client_protobuf.rs | 4 ++-- tonic-protobuf/src/lib.rs | 4 +++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/interop/Cargo.toml b/interop/Cargo.toml index 92493fed3..dfc69aa38 100644 --- a/interop/Cargo.toml +++ b/interop/Cargo.toml @@ -27,6 +27,11 @@ tonic-prost = {path = "../tonic-prost"} tower = "0.5" tracing-subscriber = {version = "0.3"} grpc = {path = "../grpc"} +# TODO: Remove the direct protobuf dependency after updating to version 4.32, +# which includes https://github.com/protocolbuffers/protobuf/pull/22764. +# We also need the protobuf-codegen crate to support configuring the path +# to the protobuf crate used in the generated message code, instead of +# defaulting to `::protobuf`. protobuf = { version = "4.31.1-release" } tonic-protobuf = {path = "../tonic-protobuf"} diff --git a/interop/src/client_protobuf.rs b/interop/src/client_protobuf.rs index 456251e0d..9210ec830 100644 --- a/interop/src/client_protobuf.rs +++ b/interop/src/client_protobuf.rs @@ -27,13 +27,13 @@ use crate::{ grpc_pb::test_service_client::*, grpc_pb::unimplemented_service_client::*, grpc_pb::*, test_assert, TestAssertion, }; -use protobuf::__internal::MatcherEq; -use protobuf::proto; use tokio::sync::mpsc; use tokio_stream::StreamExt; use tonic::async_trait; use tonic::transport::Channel; use tonic::{metadata::MetadataValue, Code, Request, Response, Status}; +use tonic_protobuf::protobuf::__internal::MatcherEq; +use tonic_protobuf::protobuf::proto; pub type TestClient = TestServiceClient; pub type UnimplementedClient = UnimplementedServiceClient; diff --git a/tonic-protobuf/src/lib.rs b/tonic-protobuf/src/lib.rs index 9914325ff..f2e434d1b 100644 --- a/tonic-protobuf/src/lib.rs +++ b/tonic-protobuf/src/lib.rs @@ -23,13 +23,15 @@ */ use bytes::{Buf, BufMut}; -use protobuf::Message; use std::marker::PhantomData; use tonic::{ codec::{Codec, DecodeBuf, Decoder, EncodeBuf, Encoder}, Status, }; +pub use protobuf; +use protobuf::Message; + /// A [`Codec`] that implements `application/grpc+proto` via the protobuf /// library. #[derive(Debug, Clone)]