From 8e9367331aca8a62abce78fabefd3192f1d0aab1 Mon Sep 17 00:00:00 2001
From: Mihail Stoykov
Date: Mon, 29 Mar 2021 18:48:29 +0300
Subject: [PATCH 1/2] Maintenance update of google.golang.org/grpc to v1.36.1
 from v1.31.1

The changelog includes a lot of bugfixes, most of which concern
functionality we don't currently use. It also mentions a proto codec
optimization that shows a 2-5% improvement.
---
 go.mod | 6 +-
 go.sum | 20 +-
 lib/testutils/httpmultibin/httpmultibin.go | 1 +
 vendor/github.com/stretchr/testify/LICENSE | 35 +-
 .../testify/assert/assertion_format.go | 144 +-
 .../testify/assert/assertion_forward.go | 288 +++-
 .../testify/assert/assertion_order.go | 309 ++++
 .../stretchr/testify/assert/assertions.go | 302 +++-
 .../testify/assert/forward_assertions.go | 2 +-
 .../testify/require/forward_requirements.go | 2 +-
 .../stretchr/testify/require/require.go | 842 +++++++----
 .../testify/require/require_forward.go | 288 +++-
 .../stretchr/testify/require/requirements.go | 4 +-
 vendor/google.golang.org/grpc/CONTRIBUTING.md | 1 -
 vendor/google.golang.org/grpc/README.md | 4 +-
 vendor/google.golang.org/grpc/SECURITY.md | 3 +
 .../grpc/attributes/attributes.go | 5 +-
 vendor/google.golang.org/grpc/backoff.go | 5 +-
 .../grpc/balancer/balancer.go | 4 +
 .../grpc/balancer/base/balancer.go | 32 +-
 .../grpc_binarylog_v1/binarylog.pb.go | 1249 +++++++++++------
 vendor/google.golang.org/grpc/clientconn.go | 124 +-
 .../grpc/connectivity/connectivity.go | 16 +-
 .../grpc/credentials/credentials.go | 19 +-
 .../google.golang.org/grpc/credentials/tls.go | 44 +-
 vendor/google.golang.org/grpc/dialoptions.go | 60 +-
 .../grpc/encoding/encoding.go | 12 +-
 .../grpc/encoding/proto/proto.go | 70 +-
 vendor/google.golang.org/grpc/go.mod | 12 +-
 vendor/google.golang.org/grpc/go.sum | 59 +-
 .../grpc/grpclog/component.go | 2 +-
 .../grpc/grpclog/loggerv2.go | 5 +-
 vendor/google.golang.org/grpc/install_gae.sh | 2 +-
 vendor/google.golang.org/grpc/interceptor.go | 36 +-
 .../grpc/internal/binarylog/method_logger.go | 2 +-
 .../grpc/internal/binarylog/sink.go | 68 +-
 .../grpc/internal/channelz/funcs.go | 4 +-
 .../grpc/internal/channelz/logging.go | 4 +-
 .../grpc/internal/channelz/types.go | 8 +-
 .../credentials/{go110.go => spiffe.go} | 28 +-
 .../{gobefore110.go => spiffe_appengine.go} | 4 +-
 .../credentials}/syscallconn.go | 3 +-
 .../credentials}/syscallconn_appengine.go | 2 +-
 .../grpc/internal/credentials/util.go | 50 +
 .../grpc/internal/grpclog/grpclog.go | 5 +-
 .../grpc/internal/grpcutil/target.go | 42 +-
 .../grpc/internal/internal.go | 13 +
 .../grpc/internal/metadata/metadata.go | 50 +
 .../grpc/internal/resolver/config_selector.go | 95 ++
 .../grpc/internal/resolver/unix/unix.go | 63 +
 .../internal/serviceconfig/serviceconfig.go | 82 +-
 .../grpc/internal/syscall/syscall_linux.go | 22 +-
 .../grpc/internal/syscall/syscall_nonlinux.go | 2 +-
 .../grpc/internal/transport/http2_client.go | 84 +-
 .../grpc/internal/transport/http2_server.go | 10 +-
 .../grpc/internal/transport/http_util.go | 50 +-
 .../transport/networktype/networktype.go | 46 +
 .../grpc/{ => internal/transport}/proxy.go | 52 +-
 .../grpc/internal/transport/transport.go | 2 +
 vendor/google.golang.org/grpc/preloader.go | 5 +-
 vendor/google.golang.org/grpc/regenerate.sh | 70 +-
 .../grpc/resolver/resolver.go | 6 +-
 .../grpc/resolver_conn_wrapper.go | 2 +-
 vendor/google.golang.org/grpc/rpc_util.go | 126 +-
 vendor/google.golang.org/grpc/server.go | 170 ++-
 .../google.golang.org/grpc/service_config.go | 74 +-
 .../grpc/serviceconfig/serviceconfig.go | 5 +-
.../google.golang.org/grpc/status/status.go | 8 +- vendor/google.golang.org/grpc/stream.go | 41 +- vendor/google.golang.org/grpc/tap/tap.go | 7 +- .../grpc/test/grpc_testing/test.pb.go | 1035 +++++++++----- .../grpc/test/grpc_testing/test_grpc.pb.go | 45 +- vendor/google.golang.org/grpc/version.go | 2 +- vendor/google.golang.org/grpc/vet.sh | 40 +- .../protobuf/encoding/protojson/decode.go | 9 +- .../protobuf/encoding/protojson/encode.go | 7 +- .../encoding/protojson/well_known_types.go | 276 ++-- .../protobuf/encoding/prototext/decode.go | 65 +- .../protobuf/encoding/prototext/encode.go | 12 +- .../protobuf/internal/detectknown/detect.go | 64 - .../protobuf/internal/fieldnum/any_gen.go | 13 - .../protobuf/internal/fieldnum/api_gen.go | 35 - .../internal/fieldnum/descriptor_gen.go | 240 ---- .../protobuf/internal/fieldnum/doc.go | 7 - .../internal/fieldnum/duration_gen.go | 13 - .../protobuf/internal/fieldnum/empty_gen.go | 10 - .../internal/fieldnum/field_mask_gen.go | 12 - .../internal/fieldnum/source_context_gen.go | 12 - .../protobuf/internal/fieldnum/struct_gen.go | 33 - .../internal/fieldnum/timestamp_gen.go | 13 - .../protobuf/internal/fieldnum/type_gen.go | 53 - .../internal/fieldnum/wrappers_gen.go | 52 - .../protobuf/internal/filedesc/build.go | 16 +- .../protobuf/internal/filedesc/desc.go | 5 +- .../protobuf/internal/filedesc/desc_init.go | 62 +- .../protobuf/internal/filedesc/desc_lazy.go | 124 +- .../protobuf/internal/filedesc/desc_list.go | 6 +- .../protobuf/internal/genid/any_gen.go | 34 + .../protobuf/internal/genid/api_gen.go | 106 ++ .../protobuf/internal/genid/descriptor_gen.go | 829 +++++++++++ .../protobuf/internal/genid/doc.go | 11 + .../protobuf/internal/genid/duration_gen.go | 34 + .../protobuf/internal/genid/empty_gen.go | 19 + .../protobuf/internal/genid/field_mask_gen.go | 31 + .../protobuf/internal/genid/goname.go | 25 + .../protobuf/internal/genid/map_entry.go | 16 + .../internal/genid/source_context_gen.go | 31 + .../protobuf/internal/genid/struct_gen.go | 116 ++ .../protobuf/internal/genid/timestamp_gen.go | 34 + .../protobuf/internal/genid/type_gen.go | 184 +++ .../protobuf/internal/genid/wrappers.go | 13 + .../protobuf/internal/genid/wrappers_gen.go | 175 +++ .../protobuf/internal/genname/name.go | 25 - .../protobuf/internal/impl/api_export.go | 7 + .../protobuf/internal/impl/codec_map.go | 5 +- .../protobuf/internal/impl/message.go | 10 +- .../protobuf/internal/impl/validate.go | 5 +- .../protobuf/internal/version/version.go | 2 +- .../protobuf/proto/decode.go | 5 +- .../reflect/protodesc/desc_validate.go | 5 +- .../protobuf/reflect/protoreflect/proto.go | 50 +- .../protobuf/types/dynamicpb/dynamic.go | 38 +- .../protobuf/types/known/anypb/any.pb.go | 207 +++ .../types/known/durationpb/duration.pb.go | 130 ++ .../types/known/fieldmaskpb/field_mask.pb.go | 217 +++ .../types/known/structpb/struct.pb.go | 305 ++++ .../types/known/timestamppb/timestamp.pb.go | 110 ++ .../types/known/wrapperspb/wrappers.pb.go | 45 + vendor/modules.txt | 15 +- 129 files changed, 7795 insertions(+), 2617 deletions(-) create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_order.go create mode 100644 vendor/google.golang.org/grpc/SECURITY.md rename vendor/google.golang.org/grpc/internal/credentials/{go110.go => spiffe.go} (66%) rename vendor/google.golang.org/grpc/internal/credentials/{gobefore110.go => spiffe_appengine.go} (90%) rename vendor/google.golang.org/grpc/{credentials/internal => internal/credentials}/syscallconn.go (96%) rename 
vendor/google.golang.org/grpc/{credentials/internal => internal/credentials}/syscallconn_appengine.go (97%) create mode 100644 vendor/google.golang.org/grpc/internal/credentials/util.go create mode 100644 vendor/google.golang.org/grpc/internal/metadata/metadata.go create mode 100644 vendor/google.golang.org/grpc/internal/resolver/config_selector.go create mode 100644 vendor/google.golang.org/grpc/internal/resolver/unix/unix.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/networktype/networktype.go rename vendor/google.golang.org/grpc/{ => internal/transport}/proxy.go (73%) delete mode 100644 vendor/google.golang.org/protobuf/internal/detectknown/detect.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/any_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/api_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/descriptor_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/doc.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/duration_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/empty_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/field_mask_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/source_context_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/struct_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/timestamp_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/type_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldnum/wrappers_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/any_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/api_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/doc.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/duration_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/empty_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/field_mask_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/goname.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/map_entry.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/source_context_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/struct_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/timestamp_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/type_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/wrappers.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/wrappers_gen.go delete mode 100644 vendor/google.golang.org/protobuf/internal/genname/name.go diff --git a/go.mod b/go.mod index d233f89be47..eed170fd78f 100644 --- a/go.mod +++ b/go.mod @@ -61,7 +61,7 @@ require ( github.com/spf13/afero v1.1.1 github.com/spf13/cobra v0.0.4-0.20180629152535-a114f312e075 github.com/spf13/pflag v1.0.1 - github.com/stretchr/testify v1.2.2 + github.com/stretchr/testify v1.5.1 github.com/tidwall/gjson v1.6.1 github.com/tidwall/pretty v1.0.2 github.com/ugorji/go v1.1.7 // indirect @@ -76,8 +76,8 @@ require ( golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba golang.org/x/xerrors 
v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/genproto v0.0.0-20200903010400-9bfcb5116336 // indirect - google.golang.org/grpc v1.31.1 - google.golang.org/protobuf v1.24.0 + google.golang.org/grpc v1.36.1 + google.golang.org/protobuf v1.25.0 gopkg.in/go-playground/assert.v1 v1.2.1 // indirect gopkg.in/go-playground/validator.v8 v8.18.2 // indirect gopkg.in/guregu/null.v2 v2.1.2 // indirect diff --git a/go.sum b/go.sum index 10e27fff008..c56606b59b1 100644 --- a/go.sum +++ b/go.sum @@ -20,9 +20,10 @@ github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRy github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/daaku/go.zipexe v0.0.0-20150329023125-a5fe2436ffcb h1:tUf55Po0vzOendQ7NWytcdK0VuzQmfAgvGBUOQvN0WA= github.com/daaku/go.zipexe v0.0.0-20150329023125-a5fe2436ffcb/go.mod h1:U0vRfAucUOohvdCxt5MWLF+TePIL0xbCkbKIiV8TQCE= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91 h1:Izz0+t1Z5nI16/II7vuEo/nHjodOg0p7+OiDpjX5t1E= @@ -39,7 +40,7 @@ github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc= github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.5.0 h1:vBh+kQp8lg9XPr56u1CPrWjFXtdphMoGWVHr9/1c+A0= github.com/fatih/color v1.5.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= @@ -63,7 +64,6 @@ github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfb github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -78,9 +78,11 @@ github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5a github.com/google/go-cmp 
v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1 h1:JFrFEBb2xKufg6XkJsJr+WbKb4FQlURi5RUcBveYu9k= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f h1:9oNbS1z4rVpbnkHBdPZU4jo9bSmrLpII768arSyMFgk= github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= @@ -169,8 +171,10 @@ github.com/spf13/cobra v0.0.4-0.20180629152535-a114f312e075 h1:bfSj+hHTrZKbMJHEl github.com/spf13/cobra v0.0.4-0.20180629152535-a114f312e075/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/pflag v1.0.1 h1:aCvUg6QPl3ibpQUxyLkrEkCHtPqYJL4x9AuhqVqFis4= github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/tidwall/gjson v1.6.1 h1:LRbvNuNuvAiISWg6gxLEFuCe72UKy5hDqhxW/8183ws= github.com/tidwall/gjson v1.6.1/go.mod h1:BaHyNc5bjzYkPqgLq7mdVzeiRtULKULXLgZFKsxEHI0= github.com/tidwall/match v1.0.1 h1:PnKP62LPNxHKTwvHHZZzdOAOCtsJTjo6dZLCwpKm5xc= @@ -265,8 +269,8 @@ google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZi google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.31.1 h1:SfXqXS5hkufcdZ/mHtYCh53P2b+92WQq/DZcKLgsFRs= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.36.1 h1:cmUfbeGKnz9+2DD/UYsMQXeqbHZqZDs4eQwW0sFOpBY= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -275,8 +279,9 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf 
v1.24.0 h1:UhZDfRO8JRQru4/+LlLE0BRKGF8L+PICnvYZmx/fEGA= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -292,6 +297,7 @@ gopkg.in/guregu/null.v3 v3.3.0 h1:8j3ggqq+NgKt/O7mbFVUFKUMWN+l1AmT5jQmJ6nPh2c= gopkg.in/guregu/null.v3 v3.3.0/go.mod h1:E4tX2Qe3h7QdL+uZ3a0vqvYwKQsRSQKM5V4YltdgH9Y= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/lib/testutils/httpmultibin/httpmultibin.go b/lib/testutils/httpmultibin/httpmultibin.go index 2f85cb5b53f..24ca695cd4c 100644 --- a/lib/testutils/httpmultibin/httpmultibin.go +++ b/lib/testutils/httpmultibin/httpmultibin.go @@ -210,6 +210,7 @@ func getZstdBrHandler(t testing.TB) http.Handler { // GRPCStub is an easily customisable TestServiceServer type GRPCStub struct { + grpctest.TestServiceServer EmptyCallFunc func(context.Context, *grpctest.Empty) (*grpctest.Empty, error) UnaryCallFunc func(context.Context, *grpctest.SimpleRequest) (*grpctest.SimpleResponse, error) } diff --git a/vendor/github.com/stretchr/testify/LICENSE b/vendor/github.com/stretchr/testify/LICENSE index 473b670a7c6..f38ec5956b6 100644 --- a/vendor/github.com/stretchr/testify/LICENSE +++ b/vendor/github.com/stretchr/testify/LICENSE @@ -1,22 +1,21 @@ -Copyright (c) 2012 - 2013 Mat Ryer and Tyler Bunnell +MIT License -Please consider promoting this project if you find it useful. +Copyright (c) 2012-2018 Mat Ryer and Tyler Bunnell -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without restriction, -including without limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of the Software, -and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT -OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE -OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go b/vendor/github.com/stretchr/testify/assert/assertion_format.go index aa1c2b95cdd..bf89ecd21f7 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_format.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go @@ -32,7 +32,8 @@ func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args return Contains(t, s, contains, append([]interface{}{msg}, args...)...) } -// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +// DirExistsf checks whether a directory exists in the given path. It also fails +// if the path is a file rather a directory or there is an error checking whether it exists. func DirExistsf(t TestingT, path string, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -113,6 +114,17 @@ func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { return Error(t, err, append([]interface{}{msg}, args...)...) } +// Eventuallyf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. +// +// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Eventually(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) +} + // Exactlyf asserts that two objects are equal in value and type. // // assert.Exactlyf(t, int32(123, "error message %s", "formatted"), int64(123)) @@ -149,7 +161,8 @@ func Falsef(t TestingT, value bool, msg string, args ...interface{}) bool { return False(t, value, append([]interface{}{msg}, args...)...) } -// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +// FileExistsf checks whether a file exists in the given path. It also fails if +// the path points to a directory or there is an error when trying to check the file. func FileExistsf(t TestingT, path string, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -157,6 +170,31 @@ func FileExistsf(t TestingT, path string, msg string, args ...interface{}) bool return FileExists(t, path, append([]interface{}{msg}, args...)...) 
} +// Greaterf asserts that the first element is greater than the second +// +// assert.Greaterf(t, 2, 1, "error message %s", "formatted") +// assert.Greaterf(t, float64(2, "error message %s", "formatted"), float64(1)) +// assert.Greaterf(t, "b", "a", "error message %s", "formatted") +func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Greater(t, e1, e2, append([]interface{}{msg}, args...)...) +} + +// GreaterOrEqualf asserts that the first element is greater than or equal to the second +// +// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") +func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return GreaterOrEqual(t, e1, e2, append([]interface{}{msg}, args...)...) +} + // HTTPBodyContainsf asserts that a specified handler returns a // body that contains a string. // @@ -231,7 +269,7 @@ func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, ms // InDeltaf asserts that the two numerals are within delta of each other. // -// assert.InDeltaf(t, math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +// assert.InDeltaf(t, math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -300,6 +338,42 @@ func Lenf(t TestingT, object interface{}, length int, msg string, args ...interf return Len(t, object, length, append([]interface{}{msg}, args...)...) } +// Lessf asserts that the first element is less than the second +// +// assert.Lessf(t, 1, 2, "error message %s", "formatted") +// assert.Lessf(t, float64(1, "error message %s", "formatted"), float64(2)) +// assert.Lessf(t, "a", "b", "error message %s", "formatted") +func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Less(t, e1, e2, append([]interface{}{msg}, args...)...) +} + +// LessOrEqualf asserts that the first element is less than or equal to the second +// +// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") +// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") +func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return LessOrEqual(t, e1, e2, append([]interface{}{msg}, args...)...) +} + +// Neverf asserts that the given condition doesn't satisfy in waitFor time, +// periodically checking the target function each tick. +// +// assert.Neverf(t, func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func Neverf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Never(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) +} + // Nilf asserts that the specified object is nil. 
// // assert.Nilf(t, err, "error message %s", "formatted") @@ -310,6 +384,15 @@ func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) bool return Nil(t, object, append([]interface{}{msg}, args...)...) } +// NoDirExistsf checks whether a directory does not exist in the given path. +// It fails if the path points to an existing _directory_ only. +func NoDirExistsf(t TestingT, path string, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return NoDirExists(t, path, append([]interface{}{msg}, args...)...) +} + // NoErrorf asserts that a function returned no error (i.e. `nil`). // // actualObj, err := SomeFunction() @@ -323,6 +406,15 @@ func NoErrorf(t TestingT, err error, msg string, args ...interface{}) bool { return NoError(t, err, append([]interface{}{msg}, args...)...) } +// NoFileExistsf checks whether a file does not exist in a given path. It fails +// if the path points to an existing _file_ only. +func NoFileExistsf(t TestingT, path string, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return NoFileExists(t, path, append([]interface{}{msg}, args...)...) +} + // NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // @@ -393,6 +485,19 @@ func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args .. return NotRegexp(t, rx, str, append([]interface{}{msg}, args...)...) } +// NotSamef asserts that two pointers do not reference the same object. +// +// assert.NotSamef(t, ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return NotSame(t, expected, actual, append([]interface{}{msg}, args...)...) +} + // NotSubsetf asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // @@ -422,6 +527,18 @@ func Panicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool return Panics(t, f, append([]interface{}{msg}, args...)...) } +// PanicsWithErrorf asserts that the code inside the specified PanicTestFunc +// panics, and that the recovered panic value is an error that satisfies the +// EqualError comparison. +// +// assert.PanicsWithErrorf(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func PanicsWithErrorf(t TestingT, errString string, f PanicTestFunc, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return PanicsWithError(t, errString, f, append([]interface{}{msg}, args...)...) +} + // PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // @@ -444,6 +561,19 @@ func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...in return Regexp(t, rx, str, append([]interface{}{msg}, args...)...) } +// Samef asserts that two pointers reference the same object. +// +// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. 
+func Samef(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Same(t, expected, actual, append([]interface{}{msg}, args...)...) +} + // Subsetf asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // @@ -475,6 +605,14 @@ func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta tim return WithinDuration(t, expected, actual, delta, append([]interface{}{msg}, args...)...) } +// YAMLEqf asserts that two YAML strings are equivalent. +func YAMLEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return YAMLEq(t, expected, actual, append([]interface{}{msg}, args...)...) +} + // Zerof asserts that i is the zero value for its type. func Zerof(t TestingT, i interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go index de39f794e72..75ecdcaa2f3 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_forward.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go @@ -53,7 +53,8 @@ func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, return Containsf(a.t, s, contains, msg, args...) } -// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +// DirExists checks whether a directory exists in the given path. It also fails +// if the path is a file rather a directory or there is an error checking whether it exists. func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -61,7 +62,8 @@ func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) bool { return DirExists(a.t, path, msgAndArgs...) } -// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +// DirExistsf checks whether a directory exists in the given path. It also fails +// if the path is a file rather a directory or there is an error checking whether it exists. func (a *Assertions) DirExistsf(path string, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -215,6 +217,28 @@ func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool { return Errorf(a.t, err, msg, args...) } +// Eventually asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. +// +// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) +func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Eventually(a.t, condition, waitFor, tick, msgAndArgs...) +} + +// Eventuallyf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. 
+// +// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Eventuallyf(a.t, condition, waitFor, tick, msg, args...) +} + // Exactly asserts that two objects are equal in value and type. // // a.Exactly(int32(123), int64(123)) @@ -287,7 +311,8 @@ func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) bool { return Falsef(a.t, value, msg, args...) } -// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +// FileExists checks whether a file exists in the given path. It also fails if +// the path points to a directory or there is an error when trying to check the file. func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -295,7 +320,8 @@ func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) bool { return FileExists(a.t, path, msgAndArgs...) } -// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +// FileExistsf checks whether a file exists in the given path. It also fails if +// the path points to a directory or there is an error when trying to check the file. func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -303,6 +329,56 @@ func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) b return FileExistsf(a.t, path, msg, args...) } +// Greater asserts that the first element is greater than the second +// +// a.Greater(2, 1) +// a.Greater(float64(2), float64(1)) +// a.Greater("b", "a") +func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Greater(a.t, e1, e2, msgAndArgs...) +} + +// GreaterOrEqual asserts that the first element is greater than or equal to the second +// +// a.GreaterOrEqual(2, 1) +// a.GreaterOrEqual(2, 2) +// a.GreaterOrEqual("b", "a") +// a.GreaterOrEqual("b", "b") +func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return GreaterOrEqual(a.t, e1, e2, msgAndArgs...) +} + +// GreaterOrEqualf asserts that the first element is greater than or equal to the second +// +// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") +// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") +// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") +// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") +func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return GreaterOrEqualf(a.t, e1, e2, msg, args...) 
+} + +// Greaterf asserts that the first element is greater than the second +// +// a.Greaterf(2, 1, "error message %s", "formatted") +// a.Greaterf(float64(2, "error message %s", "formatted"), float64(1)) +// a.Greaterf("b", "a", "error message %s", "formatted") +func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Greaterf(a.t, e1, e2, msg, args...) +} + // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. // @@ -449,7 +525,7 @@ func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{} // InDelta asserts that the two numerals are within delta of each other. // -// a.InDelta(math.Pi, (22 / 7.0), 0.01) +// a.InDelta(math.Pi, 22/7.0, 0.01) func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -491,7 +567,7 @@ func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, del // InDeltaf asserts that the two numerals are within delta of each other. // -// a.InDeltaf(math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +// a.InDeltaf(math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -589,6 +665,78 @@ func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...in return Lenf(a.t, object, length, msg, args...) } +// Less asserts that the first element is less than the second +// +// a.Less(1, 2) +// a.Less(float64(1), float64(2)) +// a.Less("a", "b") +func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Less(a.t, e1, e2, msgAndArgs...) +} + +// LessOrEqual asserts that the first element is less than or equal to the second +// +// a.LessOrEqual(1, 2) +// a.LessOrEqual(2, 2) +// a.LessOrEqual("a", "b") +// a.LessOrEqual("b", "b") +func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return LessOrEqual(a.t, e1, e2, msgAndArgs...) +} + +// LessOrEqualf asserts that the first element is less than or equal to the second +// +// a.LessOrEqualf(1, 2, "error message %s", "formatted") +// a.LessOrEqualf(2, 2, "error message %s", "formatted") +// a.LessOrEqualf("a", "b", "error message %s", "formatted") +// a.LessOrEqualf("b", "b", "error message %s", "formatted") +func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return LessOrEqualf(a.t, e1, e2, msg, args...) +} + +// Lessf asserts that the first element is less than the second +// +// a.Lessf(1, 2, "error message %s", "formatted") +// a.Lessf(float64(1, "error message %s", "formatted"), float64(2)) +// a.Lessf("a", "b", "error message %s", "formatted") +func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Lessf(a.t, e1, e2, msg, args...) +} + +// Never asserts that the given condition doesn't satisfy in waitFor time, +// periodically checking the target function each tick. 
+// +// a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond) +func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Never(a.t, condition, waitFor, tick, msgAndArgs...) +} + +// Neverf asserts that the given condition doesn't satisfy in waitFor time, +// periodically checking the target function each tick. +// +// a.Neverf(func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func (a *Assertions) Neverf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Neverf(a.t, condition, waitFor, tick, msg, args...) +} + // Nil asserts that the specified object is nil. // // a.Nil(err) @@ -609,6 +757,24 @@ func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) b return Nilf(a.t, object, msg, args...) } +// NoDirExists checks whether a directory does not exist in the given path. +// It fails if the path points to an existing _directory_ only. +func (a *Assertions) NoDirExists(path string, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NoDirExists(a.t, path, msgAndArgs...) +} + +// NoDirExistsf checks whether a directory does not exist in the given path. +// It fails if the path points to an existing _directory_ only. +func (a *Assertions) NoDirExistsf(path string, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NoDirExistsf(a.t, path, msg, args...) +} + // NoError asserts that a function returned no error (i.e. `nil`). // // actualObj, err := SomeFunction() @@ -635,6 +801,24 @@ func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) bool { return NoErrorf(a.t, err, msg, args...) } +// NoFileExists checks whether a file does not exist in a given path. It fails +// if the path points to an existing _file_ only. +func (a *Assertions) NoFileExists(path string, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NoFileExists(a.t, path, msgAndArgs...) +} + +// NoFileExistsf checks whether a file does not exist in a given path. It fails +// if the path points to an existing _file_ only. +func (a *Assertions) NoFileExistsf(path string, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NoFileExistsf(a.t, path, msg, args...) +} + // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // @@ -775,6 +959,32 @@ func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, arg return NotRegexpf(a.t, rx, str, msg, args...) } +// NotSame asserts that two pointers do not reference the same object. +// +// a.NotSame(ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) NotSame(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NotSame(a.t, expected, actual, msgAndArgs...) +} + +// NotSamef asserts that two pointers do not reference the same object. +// +// a.NotSamef(ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. 
Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return NotSamef(a.t, expected, actual, msg, args...) +} + // NotSubset asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // @@ -823,6 +1033,30 @@ func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool { return Panics(a.t, f, msgAndArgs...) } +// PanicsWithError asserts that the code inside the specified PanicTestFunc +// panics, and that the recovered panic value is an error that satisfies the +// EqualError comparison. +// +// a.PanicsWithError("crazy error", func(){ GoCrazy() }) +func (a *Assertions) PanicsWithError(errString string, f PanicTestFunc, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return PanicsWithError(a.t, errString, f, msgAndArgs...) +} + +// PanicsWithErrorf asserts that the code inside the specified PanicTestFunc +// panics, and that the recovered panic value is an error that satisfies the +// EqualError comparison. +// +// a.PanicsWithErrorf("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func (a *Assertions) PanicsWithErrorf(errString string, f PanicTestFunc, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return PanicsWithErrorf(a.t, errString, f, msg, args...) +} + // PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // @@ -877,6 +1111,32 @@ func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args . return Regexpf(a.t, rx, str, msg, args...) } +// Same asserts that two pointers reference the same object. +// +// a.Same(ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) Same(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Same(a.t, expected, actual, msgAndArgs...) +} + +// Samef asserts that two pointers reference the same object. +// +// a.Samef(ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Samef(a.t, expected, actual, msg, args...) +} + // Subset asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // @@ -939,6 +1199,22 @@ func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta return WithinDurationf(a.t, expected, actual, delta, msg, args...) } +// YAMLEq asserts that two YAML strings are equivalent. +func (a *Assertions) YAMLEq(expected string, actual string, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return YAMLEq(a.t, expected, actual, msgAndArgs...) +} + +// YAMLEqf asserts that two YAML strings are equivalent. 
+func (a *Assertions) YAMLEqf(expected string, actual string, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return YAMLEqf(a.t, expected, actual, msg, args...) +} + // Zero asserts that i is the zero value for its type. func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { diff --git a/vendor/github.com/stretchr/testify/assert/assertion_order.go b/vendor/github.com/stretchr/testify/assert/assertion_order.go new file mode 100644 index 00000000000..15a486ca6e2 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/assertion_order.go @@ -0,0 +1,309 @@ +package assert + +import ( + "fmt" + "reflect" +) + +func compare(obj1, obj2 interface{}, kind reflect.Kind) (int, bool) { + switch kind { + case reflect.Int: + { + intobj1 := obj1.(int) + intobj2 := obj2.(int) + if intobj1 > intobj2 { + return -1, true + } + if intobj1 == intobj2 { + return 0, true + } + if intobj1 < intobj2 { + return 1, true + } + } + case reflect.Int8: + { + int8obj1 := obj1.(int8) + int8obj2 := obj2.(int8) + if int8obj1 > int8obj2 { + return -1, true + } + if int8obj1 == int8obj2 { + return 0, true + } + if int8obj1 < int8obj2 { + return 1, true + } + } + case reflect.Int16: + { + int16obj1 := obj1.(int16) + int16obj2 := obj2.(int16) + if int16obj1 > int16obj2 { + return -1, true + } + if int16obj1 == int16obj2 { + return 0, true + } + if int16obj1 < int16obj2 { + return 1, true + } + } + case reflect.Int32: + { + int32obj1 := obj1.(int32) + int32obj2 := obj2.(int32) + if int32obj1 > int32obj2 { + return -1, true + } + if int32obj1 == int32obj2 { + return 0, true + } + if int32obj1 < int32obj2 { + return 1, true + } + } + case reflect.Int64: + { + int64obj1 := obj1.(int64) + int64obj2 := obj2.(int64) + if int64obj1 > int64obj2 { + return -1, true + } + if int64obj1 == int64obj2 { + return 0, true + } + if int64obj1 < int64obj2 { + return 1, true + } + } + case reflect.Uint: + { + uintobj1 := obj1.(uint) + uintobj2 := obj2.(uint) + if uintobj1 > uintobj2 { + return -1, true + } + if uintobj1 == uintobj2 { + return 0, true + } + if uintobj1 < uintobj2 { + return 1, true + } + } + case reflect.Uint8: + { + uint8obj1 := obj1.(uint8) + uint8obj2 := obj2.(uint8) + if uint8obj1 > uint8obj2 { + return -1, true + } + if uint8obj1 == uint8obj2 { + return 0, true + } + if uint8obj1 < uint8obj2 { + return 1, true + } + } + case reflect.Uint16: + { + uint16obj1 := obj1.(uint16) + uint16obj2 := obj2.(uint16) + if uint16obj1 > uint16obj2 { + return -1, true + } + if uint16obj1 == uint16obj2 { + return 0, true + } + if uint16obj1 < uint16obj2 { + return 1, true + } + } + case reflect.Uint32: + { + uint32obj1 := obj1.(uint32) + uint32obj2 := obj2.(uint32) + if uint32obj1 > uint32obj2 { + return -1, true + } + if uint32obj1 == uint32obj2 { + return 0, true + } + if uint32obj1 < uint32obj2 { + return 1, true + } + } + case reflect.Uint64: + { + uint64obj1 := obj1.(uint64) + uint64obj2 := obj2.(uint64) + if uint64obj1 > uint64obj2 { + return -1, true + } + if uint64obj1 == uint64obj2 { + return 0, true + } + if uint64obj1 < uint64obj2 { + return 1, true + } + } + case reflect.Float32: + { + float32obj1 := obj1.(float32) + float32obj2 := obj2.(float32) + if float32obj1 > float32obj2 { + return -1, true + } + if float32obj1 == float32obj2 { + return 0, true + } + if float32obj1 < float32obj2 { + return 1, true + } + } + case reflect.Float64: + { + float64obj1 := obj1.(float64) + float64obj2 := obj2.(float64) + if float64obj1 > float64obj2 
{ + return -1, true + } + if float64obj1 == float64obj2 { + return 0, true + } + if float64obj1 < float64obj2 { + return 1, true + } + } + case reflect.String: + { + stringobj1 := obj1.(string) + stringobj2 := obj2.(string) + if stringobj1 > stringobj2 { + return -1, true + } + if stringobj1 == stringobj2 { + return 0, true + } + if stringobj1 < stringobj2 { + return 1, true + } + } + } + + return 0, false +} + +// Greater asserts that the first element is greater than the second +// +// assert.Greater(t, 2, 1) +// assert.Greater(t, float64(2), float64(1)) +// assert.Greater(t, "b", "a") +func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + e1Kind := reflect.ValueOf(e1).Kind() + e2Kind := reflect.ValueOf(e2).Kind() + if e1Kind != e2Kind { + return Fail(t, "Elements should be the same type", msgAndArgs...) + } + + res, isComparable := compare(e1, e2, e1Kind) + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) + } + + if res != -1 { + return Fail(t, fmt.Sprintf("\"%v\" is not greater than \"%v\"", e1, e2), msgAndArgs...) + } + + return true +} + +// GreaterOrEqual asserts that the first element is greater than or equal to the second +// +// assert.GreaterOrEqual(t, 2, 1) +// assert.GreaterOrEqual(t, 2, 2) +// assert.GreaterOrEqual(t, "b", "a") +// assert.GreaterOrEqual(t, "b", "b") +func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + e1Kind := reflect.ValueOf(e1).Kind() + e2Kind := reflect.ValueOf(e2).Kind() + if e1Kind != e2Kind { + return Fail(t, "Elements should be the same type", msgAndArgs...) + } + + res, isComparable := compare(e1, e2, e1Kind) + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) + } + + if res != -1 && res != 0 { + return Fail(t, fmt.Sprintf("\"%v\" is not greater than or equal to \"%v\"", e1, e2), msgAndArgs...) + } + + return true +} + +// Less asserts that the first element is less than the second +// +// assert.Less(t, 1, 2) +// assert.Less(t, float64(1), float64(2)) +// assert.Less(t, "a", "b") +func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + e1Kind := reflect.ValueOf(e1).Kind() + e2Kind := reflect.ValueOf(e2).Kind() + if e1Kind != e2Kind { + return Fail(t, "Elements should be the same type", msgAndArgs...) + } + + res, isComparable := compare(e1, e2, e1Kind) + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) + } + + if res != 1 { + return Fail(t, fmt.Sprintf("\"%v\" is not less than \"%v\"", e1, e2), msgAndArgs...) + } + + return true +} + +// LessOrEqual asserts that the first element is less than or equal to the second +// +// assert.LessOrEqual(t, 1, 2) +// assert.LessOrEqual(t, 2, 2) +// assert.LessOrEqual(t, "a", "b") +// assert.LessOrEqual(t, "b", "b") +func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + e1Kind := reflect.ValueOf(e1).Kind() + e2Kind := reflect.ValueOf(e2).Kind() + if e1Kind != e2Kind { + return Fail(t, "Elements should be the same type", msgAndArgs...) 
+ } + + res, isComparable := compare(e1, e2, e1Kind) + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) + } + + if res != 1 && res != 0 { + return Fail(t, fmt.Sprintf("\"%v\" is not less than or equal to \"%v\"", e1, e2), msgAndArgs...) + } + + return true +} diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go index 5bdec56cd83..bdd81389a97 100644 --- a/vendor/github.com/stretchr/testify/assert/assertions.go +++ b/vendor/github.com/stretchr/testify/assert/assertions.go @@ -11,6 +11,7 @@ import ( "reflect" "regexp" "runtime" + "runtime/debug" "strings" "time" "unicode" @@ -18,9 +19,10 @@ import ( "github.com/davecgh/go-spew/spew" "github.com/pmezard/go-difflib/difflib" + yaml "gopkg.in/yaml.v2" ) -//go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_format.go.tmpl +//go:generate sh -c "cd ../_codegen && go build && cd - && ../_codegen/_codegen -output-package=assert -template=assertion_format.go.tmpl" // TestingT is an interface wrapper around *testing.T type TestingT interface { @@ -39,7 +41,7 @@ type ValueAssertionFunc func(TestingT, interface{}, ...interface{}) bool // for table driven tests. type BoolAssertionFunc func(TestingT, bool, ...interface{}) bool -// ValuesAssertionFunc is a common function prototype when validating an error value. Can be useful +// ErrorAssertionFunc is a common function prototype when validating an error value. Can be useful // for table driven tests. type ErrorAssertionFunc func(TestingT, error, ...interface{}) bool @@ -179,7 +181,11 @@ func messageFromMsgAndArgs(msgAndArgs ...interface{}) string { return "" } if len(msgAndArgs) == 1 { - return msgAndArgs[0].(string) + msg := msgAndArgs[0] + if msgAsStr, ok := msg.(string); ok { + return msgAsStr + } + return fmt.Sprintf("%+v", msg) } if len(msgAndArgs) > 1 { return fmt.Sprintf(msgAndArgs[0].(string), msgAndArgs[1:]...) @@ -346,6 +352,75 @@ func Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) } +// validateEqualArgs checks whether provided arguments can be safely used in the +// Equal/NotEqual functions. +func validateEqualArgs(expected, actual interface{}) error { + if expected == nil && actual == nil { + return nil + } + + if isFunction(expected) || isFunction(actual) { + return errors.New("cannot take func type as argument") + } + return nil +} + +// Same asserts that two pointers reference the same object. +// +// assert.Same(t, ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func Same(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + if !samePointers(expected, actual) { + return Fail(t, fmt.Sprintf("Not same: \n"+ + "expected: %p %#v\n"+ + "actual : %p %#v", expected, expected, actual, actual), msgAndArgs...) + } + + return true +} + +// NotSame asserts that two pointers do not reference the same object. +// +// assert.NotSame(t, ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. 
+func NotSame(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + if samePointers(expected, actual) { + return Fail(t, fmt.Sprintf( + "Expected and actual point to the same object: %p %#v", + expected, expected), msgAndArgs...) + } + return true +} + +// samePointers compares two generic interface objects and returns whether +// they point to the same object +func samePointers(first, second interface{}) bool { + firstPtr, secondPtr := reflect.ValueOf(first), reflect.ValueOf(second) + if firstPtr.Kind() != reflect.Ptr || secondPtr.Kind() != reflect.Ptr { + return false + } + + firstType, secondType := reflect.TypeOf(first), reflect.TypeOf(second) + if firstType != secondType { + return false + } + + // compare pointer addresses + return first == second +} + // formatUnequalValues takes two values of arbitrary types and returns string // representations appropriate to be presented to the user. // @@ -357,9 +432,11 @@ func formatUnequalValues(expected, actual interface{}) (e string, a string) { return fmt.Sprintf("%T(%#v)", expected, expected), fmt.Sprintf("%T(%#v)", actual, actual) } - - return fmt.Sprintf("%#v", expected), - fmt.Sprintf("%#v", actual) + switch expected.(type) { + case time.Duration: + return fmt.Sprintf("%v", expected), fmt.Sprintf("%v", actual) + } + return fmt.Sprintf("%#v", expected), fmt.Sprintf("%#v", actual) } // EqualValues asserts that two objects are equal or convertable to the same types @@ -415,6 +492,17 @@ func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return Fail(t, "Expected value not to be nil.", msgAndArgs...) } +// containsKind checks if a specified kind in the slice of kinds. +func containsKind(kinds []reflect.Kind, kind reflect.Kind) bool { + for i := 0; i < len(kinds); i++ { + if kind == kinds[i] { + return true + } + } + + return false +} + // isNil checks if a specified object is nil or not, without Failing. 
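A brief sketch of the new pointer-identity assertions above; Same requires both type and address to match, so two pointers to equal but separately allocated values are not "same" (the names and types here are illustrative):

package example_test

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestSameness(t *testing.T) {
    type payload struct{ n int }

    a := &payload{n: 1}
    b := &payload{n: 1} // equal contents, different allocation

    assert.Same(t, a, a)     // identical pointer: passes
    assert.NotSame(t, a, b)  // distinct pointers: passes
    assert.NotSame(t, a, *a) // a non-pointer argument is never "same"
}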
func isNil(object interface{}) bool { if object == nil { @@ -423,7 +511,14 @@ func isNil(object interface{}) bool { value := reflect.ValueOf(object) kind := value.Kind() - if kind >= reflect.Chan && kind <= reflect.Slice && value.IsNil() { + isNilableKind := containsKind( + []reflect.Kind{ + reflect.Chan, reflect.Func, + reflect.Interface, reflect.Map, + reflect.Ptr, reflect.Slice}, + kind) + + if isNilableKind && value.IsNil() { return true } @@ -457,14 +552,14 @@ func isEmpty(object interface{}) bool { // collection types are empty when they have no element case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice: return objValue.Len() == 0 - // pointers are empty if nil or if the value they point to is empty + // pointers are empty if nil or if the value they point to is empty case reflect.Ptr: if objValue.IsNil() { return true } deref := objValue.Elem().Interface() return isEmpty(deref) - // for all other types, compare against the zero value + // for all other types, compare against the zero value default: zero := reflect.Zero(objValue.Type()) return reflect.DeepEqual(object, zero.Interface()) @@ -607,7 +702,7 @@ func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{ func includeElement(list interface{}, element interface{}) (ok, found bool) { listValue := reflect.ValueOf(list) - elementValue := reflect.ValueOf(element) + listKind := reflect.TypeOf(list).Kind() defer func() { if e := recover(); e != nil { ok = false @@ -615,11 +710,12 @@ func includeElement(list interface{}, element interface{}) (ok, found bool) { } }() - if reflect.TypeOf(list).Kind() == reflect.String { + if listKind == reflect.String { + elementValue := reflect.ValueOf(element) return true, strings.Contains(listValue.String(), elementValue.String()) } - if reflect.TypeOf(list).Kind() == reflect.Map { + if listKind == reflect.Map { mapKeys := listValue.MapKeys() for i := 0; i < len(mapKeys); i++ { if ObjectsAreEqual(mapKeys[i].Interface(), element) { @@ -846,15 +942,17 @@ func Condition(t TestingT, comp Comparison, msgAndArgs ...interface{}) bool { type PanicTestFunc func() // didPanic returns true if the function passed to it panics. Otherwise, it returns false. -func didPanic(f PanicTestFunc) (bool, interface{}) { +func didPanic(f PanicTestFunc) (bool, interface{}, string) { didPanic := false var message interface{} + var stack string func() { defer func() { if message = recover(); message != nil { didPanic = true + stack = string(debug.Stack()) } }() @@ -863,7 +961,7 @@ func didPanic(f PanicTestFunc) (bool, interface{}) { }() - return didPanic, message + return didPanic, message, stack } @@ -875,7 +973,7 @@ func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { h.Helper() } - if funcDidPanic, panicValue := didPanic(f); !funcDidPanic { + if funcDidPanic, panicValue, _ := didPanic(f); !funcDidPanic { return Fail(t, fmt.Sprintf("func %#v should panic\n\tPanic value:\t%#v", f, panicValue), msgAndArgs...) } @@ -891,12 +989,34 @@ func PanicsWithValue(t TestingT, expected interface{}, f PanicTestFunc, msgAndAr h.Helper() } - funcDidPanic, panicValue := didPanic(f) + funcDidPanic, panicValue, panickedStack := didPanic(f) if !funcDidPanic { return Fail(t, fmt.Sprintf("func %#v should panic\n\tPanic value:\t%#v", f, panicValue), msgAndArgs...) } if panicValue != expected { - return Fail(t, fmt.Sprintf("func %#v should panic with value:\t%#v\n\tPanic value:\t%#v", f, expected, panicValue), msgAndArgs...) 
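The reworked isNil above spells out the nilable kinds (chan, func, interface, map, pointer, slice) instead of relying on the ordering of reflect.Kind values. A small illustrative sketch of what Nil/NotNil accept as a result (test name and variables are hypothetical):

package example_test

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestNilableKinds(t *testing.T) {
    var m map[string]int
    var s []byte
    var ch chan struct{}
    var fn func()

    // Nil values of nilable kinds all satisfy Nil.
    assert.Nil(t, m)
    assert.Nil(t, s)
    assert.Nil(t, ch)
    assert.Nil(t, fn)

    // A zero int is not of a nilable kind, so NotNil passes.
    assert.NotNil(t, 0)
}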
+ return Fail(t, fmt.Sprintf("func %#v should panic with value:\t%#v\n\tPanic value:\t%#v\n\tPanic stack:\t%s", f, expected, panicValue, panickedStack), msgAndArgs...) + } + + return true +} + +// PanicsWithError asserts that the code inside the specified PanicTestFunc +// panics, and that the recovered panic value is an error that satisfies the +// EqualError comparison. +// +// assert.PanicsWithError(t, "crazy error", func(){ GoCrazy() }) +func PanicsWithError(t TestingT, errString string, f PanicTestFunc, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + funcDidPanic, panicValue, panickedStack := didPanic(f) + if !funcDidPanic { + return Fail(t, fmt.Sprintf("func %#v should panic\n\tPanic value:\t%#v", f, panicValue), msgAndArgs...) + } + panicErr, ok := panicValue.(error) + if !ok || panicErr.Error() != errString { + return Fail(t, fmt.Sprintf("func %#v should panic with error message:\t%#v\n\tPanic value:\t%#v\n\tPanic stack:\t%s", f, errString, panicValue, panickedStack), msgAndArgs...) } return true @@ -910,8 +1030,8 @@ func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { h.Helper() } - if funcDidPanic, panicValue := didPanic(f); funcDidPanic { - return Fail(t, fmt.Sprintf("func %#v should not panic\n\tPanic value:\t%v", f, panicValue), msgAndArgs...) + if funcDidPanic, panicValue, panickedStack := didPanic(f); funcDidPanic { + return Fail(t, fmt.Sprintf("func %#v should not panic\n\tPanic value:\t%v\n\tPanic stack:\t%s", f, panicValue, panickedStack), msgAndArgs...) } return true @@ -971,7 +1091,7 @@ func toFloat(x interface{}) (float64, bool) { // InDelta asserts that the two numerals are within delta of each other. // -// assert.InDelta(t, math.Pi, (22 / 7.0), 0.01) +// assert.InDelta(t, math.Pi, 22/7.0, 0.01) func InDelta(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1259,7 +1379,8 @@ func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool { return true } -// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +// FileExists checks whether a file exists in the given path. It also fails if +// the path points to a directory or there is an error when trying to check the file. func FileExists(t TestingT, path string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1277,7 +1398,24 @@ func FileExists(t TestingT, path string, msgAndArgs ...interface{}) bool { return true } -// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +// NoFileExists checks whether a file does not exist in a given path. It fails +// if the path points to an existing _file_ only. +func NoFileExists(t TestingT, path string, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + info, err := os.Lstat(path) + if err != nil { + return true + } + if info.IsDir() { + return true + } + return Fail(t, fmt.Sprintf("file %q exists", path), msgAndArgs...) +} + +// DirExists checks whether a directory exists in the given path. It also fails +// if the path is a file rather a directory or there is an error checking whether it exists. 
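A minimal sketch of the new PanicsWithError assertion above, which requires the recovered panic value to be an error with a matching message (the function and message are illustrative):

package example_test

import (
    "errors"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestPanicsWithError(t *testing.T) {
    boom := func() { panic(errors.New("crazy error")) }

    // Passes: the panic value is an error and its message matches.
    assert.PanicsWithError(t, "crazy error", boom)

    // Would fail: a plain string panic value is not an error.
    // assert.PanicsWithError(t, "crazy error", func() { panic("crazy error") })
}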
func DirExists(t TestingT, path string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1295,6 +1433,25 @@ func DirExists(t TestingT, path string, msgAndArgs ...interface{}) bool { return true } +// NoDirExists checks whether a directory does not exist in the given path. +// It fails if the path points to an existing _directory_ only. +func NoDirExists(t TestingT, path string, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + info, err := os.Lstat(path) + if err != nil { + if os.IsNotExist(err) { + return true + } + return true + } + if !info.IsDir() { + return true + } + return Fail(t, fmt.Sprintf("directory %q exists", path), msgAndArgs...) +} + // JSONEq asserts that two JSON strings are equivalent. // // assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) @@ -1315,6 +1472,24 @@ func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{ return Equal(t, expectedJSONAsInterface, actualJSONAsInterface, msgAndArgs...) } +// YAMLEq asserts that two YAML strings are equivalent. +func YAMLEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + var expectedYAMLAsInterface, actualYAMLAsInterface interface{} + + if err := yaml.Unmarshal([]byte(expected), &expectedYAMLAsInterface); err != nil { + return Fail(t, fmt.Sprintf("Expected value ('%s') is not valid yaml.\nYAML parsing error: '%s'", expected, err.Error()), msgAndArgs...) + } + + if err := yaml.Unmarshal([]byte(actual), &actualYAMLAsInterface); err != nil { + return Fail(t, fmt.Sprintf("Input ('%s') needs to be valid yaml.\nYAML error: '%s'", actual, err.Error()), msgAndArgs...) + } + + return Equal(t, expectedYAMLAsInterface, actualYAMLAsInterface, msgAndArgs...) +} + func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) { t := reflect.TypeOf(v) k := t.Kind() @@ -1327,7 +1502,7 @@ func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) { } // diff returns a diff of both values as long as both are of the same type and -// are a struct, map, slice or array. Otherwise it returns an empty string. +// are a struct, map, slice, array or string. Otherwise it returns an empty string. func diff(expected interface{}, actual interface{}) string { if expected == nil || actual == nil { return "" @@ -1345,12 +1520,12 @@ func diff(expected interface{}, actual interface{}) string { } var e, a string - if ek != reflect.String { + if et != reflect.TypeOf("") { e = spewConfig.Sdump(expected) a = spewConfig.Sdump(actual) } else { - e = expected.(string) - a = actual.(string) + e = reflect.ValueOf(expected).String() + a = reflect.ValueOf(actual).String() } diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ @@ -1366,15 +1541,6 @@ func diff(expected interface{}, actual interface{}) string { return "\n\nDiff:\n" + diff } -// validateEqualArgs checks whether provided arguments can be safely used in the -// Equal/NotEqual functions. -func validateEqualArgs(expected, actual interface{}) error { - if isFunction(expected) || isFunction(actual) { - return errors.New("cannot take func type as argument") - } - return nil -} - func isFunction(arg interface{}) bool { if arg == nil { return false @@ -1392,3 +1558,69 @@ var spewConfig = spew.ConfigState{ type tHelper interface { Helper() } + +// Eventually asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. 
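A short sketch of the YAMLEq and NoFileExists/NoDirExists helpers above; YAML documents are compared after parsing, so key order and quoting do not matter. The paths used here are assumed not to exist and are purely illustrative:

package example_test

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestYAMLAndMissingPaths(t *testing.T) {
    // Equivalent YAML despite different ordering and quoting.
    assert.YAMLEq(t, "hello: world\nfoo: bar\n", "foo: bar\nhello: \"world\"\n")

    // These pass as long as nothing exists at the (hypothetical) paths.
    assert.NoFileExists(t, "testdata/definitely-missing.txt")
    assert.NoDirExists(t, "testdata/definitely-missing-dir")
}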
+// +// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) +func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + ch := make(chan bool, 1) + + timer := time.NewTimer(waitFor) + defer timer.Stop() + + ticker := time.NewTicker(tick) + defer ticker.Stop() + + for tick := ticker.C; ; { + select { + case <-timer.C: + return Fail(t, "Condition never satisfied", msgAndArgs...) + case <-tick: + tick = nil + go func() { ch <- condition() }() + case v := <-ch: + if v { + return true + } + tick = ticker.C + } + } +} + +// Never asserts that the given condition doesn't satisfy in waitFor time, +// periodically checking the target function each tick. +// +// assert.Never(t, func() bool { return false; }, time.Second, 10*time.Millisecond) +func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + ch := make(chan bool, 1) + + timer := time.NewTimer(waitFor) + defer timer.Stop() + + ticker := time.NewTicker(tick) + defer ticker.Stop() + + for tick := ticker.C; ; { + select { + case <-timer.C: + return true + case <-tick: + tick = nil + go func() { ch <- condition() }() + case v := <-ch: + if v { + return Fail(t, "Condition satisfied", msgAndArgs...) + } + tick = ticker.C + } + } +} diff --git a/vendor/github.com/stretchr/testify/assert/forward_assertions.go b/vendor/github.com/stretchr/testify/assert/forward_assertions.go index 9ad56851d97..df189d2348f 100644 --- a/vendor/github.com/stretchr/testify/assert/forward_assertions.go +++ b/vendor/github.com/stretchr/testify/assert/forward_assertions.go @@ -13,4 +13,4 @@ func New(t TestingT) *Assertions { } } -//go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_forward.go.tmpl -include-format-funcs +//go:generate sh -c "cd ../_codegen && go build && cd - && ../_codegen/_codegen -output-package=assert -template=assertion_forward.go.tmpl -include-format-funcs" diff --git a/vendor/github.com/stretchr/testify/require/forward_requirements.go b/vendor/github.com/stretchr/testify/require/forward_requirements.go index ac71d40581b..1dcb2338c4c 100644 --- a/vendor/github.com/stretchr/testify/require/forward_requirements.go +++ b/vendor/github.com/stretchr/testify/require/forward_requirements.go @@ -13,4 +13,4 @@ func New(t TestingT) *Assertions { } } -//go:generate go run ../_codegen/main.go -output-package=require -template=require_forward.go.tmpl -include-format-funcs +//go:generate sh -c "cd ../_codegen && go build && cd - && ../_codegen/_codegen -output-package=require -template=require_forward.go.tmpl -include-format-funcs" diff --git a/vendor/github.com/stretchr/testify/require/require.go b/vendor/github.com/stretchr/testify/require/require.go index 535f293490c..cf6c7b56645 100644 --- a/vendor/github.com/stretchr/testify/require/require.go +++ b/vendor/github.com/stretchr/testify/require/require.go @@ -14,23 +14,23 @@ import ( // Condition uses a Comparison to assert a complex condition. func Condition(t TestingT, comp assert.Comparison, msgAndArgs ...interface{}) { - if assert.Condition(t, comp, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Condition(t, comp, msgAndArgs...) { + return + } t.FailNow() } // Conditionf uses a Comparison to assert a complex condition. 
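A minimal sketch of the new polling assertions above: Eventually passes once the condition returns true within waitFor, while Never passes only if the condition stays false for the whole window (the durations and condition are illustrative):

package example_test

import (
    "testing"
    "time"

    "github.com/stretchr/testify/assert"
)

func TestEventuallyAndNever(t *testing.T) {
    start := time.Now()

    // The condition is polled every 5ms and becomes true after ~20ms,
    // well within the 1s budget.
    assert.Eventually(t, func() bool {
        return time.Since(start) > 20*time.Millisecond
    }, time.Second, 5*time.Millisecond)

    // The condition never becomes true, so Never passes after 50ms.
    assert.Never(t, func() bool { return false }, 50*time.Millisecond, 5*time.Millisecond)
}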
func Conditionf(t TestingT, comp assert.Comparison, msg string, args ...interface{}) { - if assert.Conditionf(t, comp, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Conditionf(t, comp, msg, args...) { + return + } t.FailNow() } @@ -41,12 +41,12 @@ func Conditionf(t TestingT, comp assert.Comparison, msg string, args ...interfac // assert.Contains(t, ["Hello", "World"], "World") // assert.Contains(t, {"Hello": "World"}, "Hello") func Contains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) { - if assert.Contains(t, s, contains, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Contains(t, s, contains, msgAndArgs...) { + return + } t.FailNow() } @@ -57,34 +57,36 @@ func Contains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...int // assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") // assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) { - if assert.Containsf(t, s, contains, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Containsf(t, s, contains, msg, args...) { + return + } t.FailNow() } -// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +// DirExists checks whether a directory exists in the given path. It also fails +// if the path is a file rather a directory or there is an error checking whether it exists. func DirExists(t TestingT, path string, msgAndArgs ...interface{}) { - if assert.DirExists(t, path, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.DirExists(t, path, msgAndArgs...) { + return + } t.FailNow() } -// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +// DirExistsf checks whether a directory exists in the given path. It also fails +// if the path is a file rather a directory or there is an error checking whether it exists. func DirExistsf(t TestingT, path string, msg string, args ...interface{}) { - if assert.DirExistsf(t, path, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.DirExistsf(t, path, msg, args...) { + return + } t.FailNow() } @@ -94,12 +96,12 @@ func DirExistsf(t TestingT, path string, msg string, args ...interface{}) { // // assert.ElementsMatch(t, [1, 3, 2, 3], [1, 3, 3, 2]) func ElementsMatch(t TestingT, listA interface{}, listB interface{}, msgAndArgs ...interface{}) { - if assert.ElementsMatch(t, listA, listB, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.ElementsMatch(t, listA, listB, msgAndArgs...) { + return + } t.FailNow() } @@ -109,12 +111,12 @@ func ElementsMatch(t TestingT, listA interface{}, listB interface{}, msgAndArgs // // assert.ElementsMatchf(t, [1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted") func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string, args ...interface{}) { - if assert.ElementsMatchf(t, listA, listB, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.ElementsMatchf(t, listA, listB, msg, args...) 
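The require wrappers above were regenerated to call t.Helper() before delegating to the assert implementation, so failures are attributed to the caller's line, and they still finish with t.FailNow() on failure. A sketch of that same pattern applied to a hypothetical project-local helper (requireInRange is not part of testify):

package example_test

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

// requireInRange follows the generated require style: mark the frame as
// a helper first, run the assertions, then abort the test on failure.
func requireInRange(t *testing.T, v, min, max int) {
    t.Helper()
    if assert.GreaterOrEqual(t, v, min) && assert.LessOrEqual(t, v, max) {
        return
    }
    t.FailNow()
}

func TestRequireStyleHelper(t *testing.T) {
    requireInRange(t, 5, 1, 10)
}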
{ + return + } t.FailNow() } @@ -123,12 +125,12 @@ func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string // // assert.Empty(t, obj) func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) { - if assert.Empty(t, object, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Empty(t, object, msgAndArgs...) { + return + } t.FailNow() } @@ -137,12 +139,12 @@ func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) { // // assert.Emptyf(t, obj, "error message %s", "formatted") func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) { - if assert.Emptyf(t, object, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Emptyf(t, object, msg, args...) { + return + } t.FailNow() } @@ -154,12 +156,12 @@ func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) { // referenced values (as opposed to the memory addresses). Function equality // cannot be determined and will always fail. func Equal(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { - if assert.Equal(t, expected, actual, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Equal(t, expected, actual, msgAndArgs...) { + return + } t.FailNow() } @@ -169,12 +171,12 @@ func Equal(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...i // actualObj, err := SomeFunction() // assert.EqualError(t, err, expectedErrorString) func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) { - if assert.EqualError(t, theError, errString, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.EqualError(t, theError, errString, msgAndArgs...) { + return + } t.FailNow() } @@ -184,12 +186,12 @@ func EqualError(t TestingT, theError error, errString string, msgAndArgs ...inte // actualObj, err := SomeFunction() // assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) { - if assert.EqualErrorf(t, theError, errString, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.EqualErrorf(t, theError, errString, msg, args...) { + return + } t.FailNow() } @@ -198,12 +200,12 @@ func EqualErrorf(t TestingT, theError error, errString string, msg string, args // // assert.EqualValues(t, uint32(123), int32(123)) func EqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { - if assert.EqualValues(t, expected, actual, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.EqualValues(t, expected, actual, msgAndArgs...) { + return + } t.FailNow() } @@ -212,12 +214,12 @@ func EqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArg // // assert.EqualValuesf(t, uint32(123, "error message %s", "formatted"), int32(123)) func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { - if assert.EqualValuesf(t, expected, actual, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.EqualValuesf(t, expected, actual, msg, args...) { + return + } t.FailNow() } @@ -229,12 +231,12 @@ func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg stri // referenced values (as opposed to the memory addresses). Function equality // cannot be determined and will always fail. 
func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { - if assert.Equalf(t, expected, actual, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Equalf(t, expected, actual, msg, args...) { + return + } t.FailNow() } @@ -245,12 +247,12 @@ func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, ar // assert.Equal(t, expectedError, err) // } func Error(t TestingT, err error, msgAndArgs ...interface{}) { - if assert.Error(t, err, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Error(t, err, msgAndArgs...) { + return + } t.FailNow() } @@ -261,12 +263,40 @@ func Error(t TestingT, err error, msgAndArgs ...interface{}) { // assert.Equal(t, expectedErrorf, err) // } func Errorf(t TestingT, err error, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } if assert.Errorf(t, err, msg, args...) { return } + t.FailNow() +} + +// Eventually asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. +// +// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) +func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.Eventually(t, condition, waitFor, tick, msgAndArgs...) { + return + } + t.FailNow() +} + +// Eventuallyf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. +// +// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Eventuallyf(t, condition, waitFor, tick, msg, args...) { + return + } t.FailNow() } @@ -274,12 +304,12 @@ func Errorf(t TestingT, err error, msg string, args ...interface{}) { // // assert.Exactly(t, int32(123), int64(123)) func Exactly(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { - if assert.Exactly(t, expected, actual, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Exactly(t, expected, actual, msgAndArgs...) { + return + } t.FailNow() } @@ -287,56 +317,56 @@ func Exactly(t TestingT, expected interface{}, actual interface{}, msgAndArgs .. // // assert.Exactlyf(t, int32(123, "error message %s", "formatted"), int64(123)) func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { - if assert.Exactlyf(t, expected, actual, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Exactlyf(t, expected, actual, msg, args...) { + return + } t.FailNow() } // Fail reports a failure through func Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) { - if assert.Fail(t, failureMessage, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Fail(t, failureMessage, msgAndArgs...) { + return + } t.FailNow() } // FailNow fails test func FailNow(t TestingT, failureMessage string, msgAndArgs ...interface{}) { - if assert.FailNow(t, failureMessage, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.FailNow(t, failureMessage, msgAndArgs...) 
{ + return + } t.FailNow() } // FailNowf fails test func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{}) { - if assert.FailNowf(t, failureMessage, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.FailNowf(t, failureMessage, msg, args...) { + return + } t.FailNow() } // Failf reports a failure through func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) { - if assert.Failf(t, failureMessage, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Failf(t, failureMessage, msg, args...) { + return + } t.FailNow() } @@ -344,12 +374,12 @@ func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) { // // assert.False(t, myBool) func False(t TestingT, value bool, msgAndArgs ...interface{}) { - if assert.False(t, value, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.False(t, value, msgAndArgs...) { + return + } t.FailNow() } @@ -357,34 +387,98 @@ func False(t TestingT, value bool, msgAndArgs ...interface{}) { // // assert.Falsef(t, myBool, "error message %s", "formatted") func Falsef(t TestingT, value bool, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } if assert.Falsef(t, value, msg, args...) { return } + t.FailNow() +} + +// FileExists checks whether a file exists in the given path. It also fails if +// the path points to a directory or there is an error when trying to check the file. +func FileExists(t TestingT, path string, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.FileExists(t, path, msgAndArgs...) { + return + } t.FailNow() } -// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. -func FileExists(t TestingT, path string, msgAndArgs ...interface{}) { - if assert.FileExists(t, path, msgAndArgs...) { +// FileExistsf checks whether a file exists in the given path. It also fails if +// the path points to a directory or there is an error when trying to check the file. +func FileExistsf(t TestingT, path string, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.FileExistsf(t, path, msg, args...) { return } + t.FailNow() +} + +// Greater asserts that the first element is greater than the second +// +// assert.Greater(t, 2, 1) +// assert.Greater(t, float64(2), float64(1)) +// assert.Greater(t, "b", "a") +func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Greater(t, e1, e2, msgAndArgs...) { + return + } t.FailNow() } -// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. -func FileExistsf(t TestingT, path string, msg string, args ...interface{}) { - if assert.FileExistsf(t, path, msg, args...) { +// GreaterOrEqual asserts that the first element is greater than or equal to the second +// +// assert.GreaterOrEqual(t, 2, 1) +// assert.GreaterOrEqual(t, 2, 2) +// assert.GreaterOrEqual(t, "b", "a") +// assert.GreaterOrEqual(t, "b", "b") +func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.GreaterOrEqual(t, e1, e2, msgAndArgs...) 
{ + return + } + t.FailNow() +} + +// GreaterOrEqualf asserts that the first element is greater than or equal to the second +// +// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") +func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.GreaterOrEqualf(t, e1, e2, msg, args...) { return } + t.FailNow() +} + +// Greaterf asserts that the first element is greater than the second +// +// assert.Greaterf(t, 2, 1, "error message %s", "formatted") +// assert.Greaterf(t, float64(2, "error message %s", "formatted"), float64(1)) +// assert.Greaterf(t, "b", "a", "error message %s", "formatted") +func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Greaterf(t, e1, e2, msg, args...) { + return + } t.FailNow() } @@ -395,12 +489,12 @@ func FileExistsf(t TestingT, path string, msg string, args ...interface{}) { // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { - if assert.HTTPBodyContains(t, handler, method, url, values, str, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPBodyContains(t, handler, method, url, values, str, msgAndArgs...) { + return + } t.FailNow() } @@ -411,12 +505,12 @@ func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method string, url s // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { - if assert.HTTPBodyContainsf(t, handler, method, url, values, str, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPBodyContainsf(t, handler, method, url, values, str, msg, args...) { + return + } t.FailNow() } @@ -427,12 +521,12 @@ func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { - if assert.HTTPBodyNotContains(t, handler, method, url, values, str, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPBodyNotContains(t, handler, method, url, values, str, msgAndArgs...) { + return + } t.FailNow() } @@ -443,12 +537,12 @@ func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method string, ur // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { - if assert.HTTPBodyNotContainsf(t, handler, method, url, values, str, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPBodyNotContainsf(t, handler, method, url, values, str, msg, args...) 
{ + return + } t.FailNow() } @@ -458,12 +552,12 @@ func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, u // // Returns whether the assertion was successful (true) or not (false). func HTTPError(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { - if assert.HTTPError(t, handler, method, url, values, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPError(t, handler, method, url, values, msgAndArgs...) { + return + } t.FailNow() } @@ -473,12 +567,12 @@ func HTTPError(t TestingT, handler http.HandlerFunc, method string, url string, // // Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { - if assert.HTTPErrorf(t, handler, method, url, values, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPErrorf(t, handler, method, url, values, msg, args...) { + return + } t.FailNow() } @@ -488,12 +582,12 @@ func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, // // Returns whether the assertion was successful (true) or not (false). func HTTPRedirect(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { - if assert.HTTPRedirect(t, handler, method, url, values, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPRedirect(t, handler, method, url, values, msgAndArgs...) { + return + } t.FailNow() } @@ -503,12 +597,12 @@ func HTTPRedirect(t TestingT, handler http.HandlerFunc, method string, url strin // // Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { - if assert.HTTPRedirectf(t, handler, method, url, values, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPRedirectf(t, handler, method, url, values, msg, args...) { + return + } t.FailNow() } @@ -518,12 +612,12 @@ func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url stri // // Returns whether the assertion was successful (true) or not (false). func HTTPSuccess(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { - if assert.HTTPSuccess(t, handler, method, url, values, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPSuccess(t, handler, method, url, values, msgAndArgs...) { + return + } t.FailNow() } @@ -533,12 +627,12 @@ func HTTPSuccess(t TestingT, handler http.HandlerFunc, method string, url string // // Returns whether the assertion was successful (true) or not (false). func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { - if assert.HTTPSuccessf(t, handler, method, url, values, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.HTTPSuccessf(t, handler, method, url, values, msg, args...) 
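For context, a brief illustrative use of the HTTP assertion helpers whose require wrappers appear above; the handler, route and body are hypothetical and the helpers exercise the handler in-process:

package example_test

import (
    "fmt"
    "net/http"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestHTTPHelpers(t *testing.T) {
    handler := func(w http.ResponseWriter, r *http.Request) {
        fmt.Fprint(w, "pong")
    }

    // A 2xx status satisfies HTTPSuccess; HTTPBodyContains checks the body.
    assert.HTTPSuccess(t, handler, "GET", "http://example.com/ping", nil)
    assert.HTTPBodyContains(t, handler, "GET", "http://example.com/ping", nil, "pong")
}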
{ + return + } t.FailNow() } @@ -546,12 +640,12 @@ func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url strin // // assert.Implements(t, (*MyInterface)(nil), new(MyObject)) func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) { - if assert.Implements(t, interfaceObject, object, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Implements(t, interfaceObject, object, msgAndArgs...) { + return + } t.FailNow() } @@ -559,148 +653,148 @@ func Implements(t TestingT, interfaceObject interface{}, object interface{}, msg // // assert.Implementsf(t, (*MyInterface, "error message %s", "formatted")(nil), new(MyObject)) func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) { - if assert.Implementsf(t, interfaceObject, object, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Implementsf(t, interfaceObject, object, msg, args...) { + return + } t.FailNow() } // InDelta asserts that the two numerals are within delta of each other. // -// assert.InDelta(t, math.Pi, (22 / 7.0), 0.01) +// assert.InDelta(t, math.Pi, 22/7.0, 0.01) func InDelta(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { - if assert.InDelta(t, expected, actual, delta, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InDelta(t, expected, actual, delta, msgAndArgs...) { + return + } t.FailNow() } // InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. func InDeltaMapValues(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { - if assert.InDeltaMapValues(t, expected, actual, delta, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InDeltaMapValues(t, expected, actual, delta, msgAndArgs...) { + return + } t.FailNow() } // InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. func InDeltaMapValuesf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { - if assert.InDeltaMapValuesf(t, expected, actual, delta, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InDeltaMapValuesf(t, expected, actual, delta, msg, args...) { + return + } t.FailNow() } // InDeltaSlice is the same as InDelta, except it compares two slices. func InDeltaSlice(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { - if assert.InDeltaSlice(t, expected, actual, delta, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InDeltaSlice(t, expected, actual, delta, msgAndArgs...) { + return + } t.FailNow() } // InDeltaSlicef is the same as InDelta, except it compares two slices. func InDeltaSlicef(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { - if assert.InDeltaSlicef(t, expected, actual, delta, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InDeltaSlicef(t, expected, actual, delta, msg, args...) { + return + } t.FailNow() } // InDeltaf asserts that the two numerals are within delta of each other. 
// -// assert.InDeltaf(t, math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +// assert.InDeltaf(t, math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { - if assert.InDeltaf(t, expected, actual, delta, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InDeltaf(t, expected, actual, delta, msg, args...) { + return + } t.FailNow() } // InEpsilon asserts that expected and actual have a relative error less than epsilon func InEpsilon(t TestingT, expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { - if assert.InEpsilon(t, expected, actual, epsilon, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InEpsilon(t, expected, actual, epsilon, msgAndArgs...) { + return + } t.FailNow() } // InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices. func InEpsilonSlice(t TestingT, expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { - if assert.InEpsilonSlice(t, expected, actual, epsilon, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InEpsilonSlice(t, expected, actual, epsilon, msgAndArgs...) { + return + } t.FailNow() } // InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices. func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { - if assert.InEpsilonSlicef(t, expected, actual, epsilon, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InEpsilonSlicef(t, expected, actual, epsilon, msg, args...) { + return + } t.FailNow() } // InEpsilonf asserts that expected and actual have a relative error less than epsilon func InEpsilonf(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { - if assert.InEpsilonf(t, expected, actual, epsilon, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.InEpsilonf(t, expected, actual, epsilon, msg, args...) { + return + } t.FailNow() } // IsType asserts that the specified objects are of the same type. func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) { - if assert.IsType(t, expectedType, object, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.IsType(t, expectedType, object, msgAndArgs...) { + return + } t.FailNow() } // IsTypef asserts that the specified objects are of the same type. func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) { - if assert.IsTypef(t, expectedType, object, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.IsTypef(t, expectedType, object, msg, args...) { + return + } t.FailNow() } @@ -708,12 +802,12 @@ func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg strin // // assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) { - if assert.JSONEq(t, expected, actual, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.JSONEq(t, expected, actual, msgAndArgs...) 
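The comment fix above also documents the intended InDelta/InDeltaf usage; a minimal illustrative sketch of the difference between the absolute (InDelta) and relative (InEpsilon) tolerance checks, with arbitrarily chosen values:

package example_test

import (
    "math"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestApproximateEquality(t *testing.T) {
    // InDelta: absolute difference no larger than delta.
    assert.InDelta(t, math.Pi, 22/7.0, 0.01)

    // InEpsilon: relative error (|expected-actual| / |expected|) below epsilon.
    assert.InEpsilon(t, 100.0, 101.0, 0.02)
}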
{ + return + } t.FailNow() } @@ -721,12 +815,12 @@ func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{ // // assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) { - if assert.JSONEqf(t, expected, actual, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.JSONEqf(t, expected, actual, msg, args...) { + return + } t.FailNow() } @@ -735,12 +829,12 @@ func JSONEqf(t TestingT, expected string, actual string, msg string, args ...int // // assert.Len(t, mySlice, 3) func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) { - if assert.Len(t, object, length, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Len(t, object, length, msgAndArgs...) { + return + } t.FailNow() } @@ -749,12 +843,102 @@ func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) // // assert.Lenf(t, mySlice, 3, "error message %s", "formatted") func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } if assert.Lenf(t, object, length, msg, args...) { return } + t.FailNow() +} + +// Less asserts that the first element is less than the second +// +// assert.Less(t, 1, 2) +// assert.Less(t, float64(1), float64(2)) +// assert.Less(t, "a", "b") +func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.Less(t, e1, e2, msgAndArgs...) { + return + } + t.FailNow() +} + +// LessOrEqual asserts that the first element is less than or equal to the second +// +// assert.LessOrEqual(t, 1, 2) +// assert.LessOrEqual(t, 2, 2) +// assert.LessOrEqual(t, "a", "b") +// assert.LessOrEqual(t, "b", "b") +func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.LessOrEqual(t, e1, e2, msgAndArgs...) { + return + } + t.FailNow() +} + +// LessOrEqualf asserts that the first element is less than or equal to the second +// +// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") +// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") +func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.LessOrEqualf(t, e1, e2, msg, args...) { + return + } + t.FailNow() +} + +// Lessf asserts that the first element is less than the second +// +// assert.Lessf(t, 1, 2, "error message %s", "formatted") +// assert.Lessf(t, float64(1, "error message %s", "formatted"), float64(2)) +// assert.Lessf(t, "a", "b", "error message %s", "formatted") +func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.Lessf(t, e1, e2, msg, args...) { + return + } + t.FailNow() +} + +// Never asserts that the given condition doesn't satisfy in waitFor time, +// periodically checking the target function each tick. 
+// +// assert.Never(t, func() bool { return false; }, time.Second, 10*time.Millisecond) +func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.Never(t, condition, waitFor, tick, msgAndArgs...) { + return + } + t.FailNow() +} + +// Neverf asserts that the given condition doesn't satisfy in waitFor time, +// periodically checking the target function each tick. +// +// assert.Neverf(t, func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func Neverf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.Neverf(t, condition, waitFor, tick, msg, args...) { + return + } t.FailNow() } @@ -762,12 +946,12 @@ func Lenf(t TestingT, object interface{}, length int, msg string, args ...interf // // assert.Nil(t, err) func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) { - if assert.Nil(t, object, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Nil(t, object, msgAndArgs...) { + return + } t.FailNow() } @@ -775,12 +959,36 @@ func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) { // // assert.Nilf(t, err, "error message %s", "formatted") func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } if assert.Nilf(t, object, msg, args...) { return } + t.FailNow() +} + +// NoDirExists checks whether a directory does not exist in the given path. +// It fails if the path points to an existing _directory_ only. +func NoDirExists(t TestingT, path string, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.NoDirExists(t, path, msgAndArgs...) { + return + } + t.FailNow() +} + +// NoDirExistsf checks whether a directory does not exist in the given path. +// It fails if the path points to an existing _directory_ only. +func NoDirExistsf(t TestingT, path string, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NoDirExistsf(t, path, msg, args...) { + return + } t.FailNow() } @@ -791,12 +999,12 @@ func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) { // assert.Equal(t, expectedObj, actualObj) // } func NoError(t TestingT, err error, msgAndArgs ...interface{}) { - if assert.NoError(t, err, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NoError(t, err, msgAndArgs...) { + return + } t.FailNow() } @@ -807,12 +1015,36 @@ func NoError(t TestingT, err error, msgAndArgs ...interface{}) { // assert.Equal(t, expectedObj, actualObj) // } func NoErrorf(t TestingT, err error, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } if assert.NoErrorf(t, err, msg, args...) { return } + t.FailNow() +} + +// NoFileExists checks whether a file does not exist in a given path. It fails +// if the path points to an existing _file_ only. +func NoFileExists(t TestingT, path string, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NoFileExists(t, path, msgAndArgs...) { + return + } + t.FailNow() +} + +// NoFileExistsf checks whether a file does not exist in a given path. It fails +// if the path points to an existing _file_ only. 
+func NoFileExistsf(t TestingT, path string, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.NoFileExistsf(t, path, msg, args...) { + return + } t.FailNow() } @@ -823,12 +1055,12 @@ func NoErrorf(t TestingT, err error, msg string, args ...interface{}) { // assert.NotContains(t, ["Hello", "World"], "Earth") // assert.NotContains(t, {"Hello": "World"}, "Earth") func NotContains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) { - if assert.NotContains(t, s, contains, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotContains(t, s, contains, msgAndArgs...) { + return + } t.FailNow() } @@ -839,12 +1071,12 @@ func NotContains(t TestingT, s interface{}, contains interface{}, msgAndArgs ... // assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") // assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) { - if assert.NotContainsf(t, s, contains, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotContainsf(t, s, contains, msg, args...) { + return + } t.FailNow() } @@ -855,12 +1087,12 @@ func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, a // assert.Equal(t, "two", obj[1]) // } func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) { - if assert.NotEmpty(t, object, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotEmpty(t, object, msgAndArgs...) { + return + } t.FailNow() } @@ -871,12 +1103,12 @@ func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) { // assert.Equal(t, "two", obj[1]) // } func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) { - if assert.NotEmptyf(t, object, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotEmptyf(t, object, msg, args...) { + return + } t.FailNow() } @@ -887,12 +1119,12 @@ func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). func NotEqual(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { - if assert.NotEqual(t, expected, actual, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotEqual(t, expected, actual, msgAndArgs...) { + return + } t.FailNow() } @@ -903,12 +1135,12 @@ func NotEqual(t TestingT, expected interface{}, actual interface{}, msgAndArgs . // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { - if assert.NotEqualf(t, expected, actual, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotEqualf(t, expected, actual, msg, args...) { + return + } t.FailNow() } @@ -916,12 +1148,12 @@ func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, // // assert.NotNil(t, err) func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) { - if assert.NotNil(t, object, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotNil(t, object, msgAndArgs...) 
{ + return + } t.FailNow() } @@ -929,12 +1161,12 @@ func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) { // // assert.NotNilf(t, err, "error message %s", "formatted") func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) { - if assert.NotNilf(t, object, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotNilf(t, object, msg, args...) { + return + } t.FailNow() } @@ -942,12 +1174,12 @@ func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) { // // assert.NotPanics(t, func(){ RemainCalm() }) func NotPanics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { - if assert.NotPanics(t, f, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotPanics(t, f, msgAndArgs...) { + return + } t.FailNow() } @@ -955,12 +1187,12 @@ func NotPanics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { // // assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") func NotPanicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) { - if assert.NotPanicsf(t, f, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotPanicsf(t, f, msg, args...) { + return + } t.FailNow() } @@ -969,12 +1201,12 @@ func NotPanicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interfac // assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") // assert.NotRegexp(t, "^start", "it's not starting") func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) { - if assert.NotRegexp(t, rx, str, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotRegexp(t, rx, str, msgAndArgs...) { + return + } t.FailNow() } @@ -983,12 +1215,44 @@ func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interf // assert.NotRegexpf(t, regexp.MustCompile("starts", "error message %s", "formatted"), "it's starting") // assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } if assert.NotRegexpf(t, rx, str, msg, args...) { return } + t.FailNow() +} + +// NotSame asserts that two pointers do not reference the same object. +// +// assert.NotSame(t, ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func NotSame(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.NotSame(t, expected, actual, msgAndArgs...) { + return + } + t.FailNow() +} + +// NotSamef asserts that two pointers do not reference the same object. +// +// assert.NotSamef(t, ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotSamef(t, expected, actual, msg, args...) { + return + } t.FailNow() } @@ -997,12 +1261,12 @@ func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args .. 
// // assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { - if assert.NotSubset(t, list, subset, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotSubset(t, list, subset, msgAndArgs...) { + return + } t.FailNow() } @@ -1011,34 +1275,34 @@ func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...i // // assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { - if assert.NotSubsetf(t, list, subset, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotSubsetf(t, list, subset, msg, args...) { + return + } t.FailNow() } // NotZero asserts that i is not the zero value for its type. func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) { - if assert.NotZero(t, i, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotZero(t, i, msgAndArgs...) { + return + } t.FailNow() } // NotZerof asserts that i is not the zero value for its type. func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) { - if assert.NotZerof(t, i, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.NotZerof(t, i, msg, args...) { + return + } t.FailNow() } @@ -1046,12 +1310,42 @@ func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) { // // assert.Panics(t, func(){ GoCrazy() }) func Panics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } if assert.Panics(t, f, msgAndArgs...) { return } + t.FailNow() +} + +// PanicsWithError asserts that the code inside the specified PanicTestFunc +// panics, and that the recovered panic value is an error that satisfies the +// EqualError comparison. +// +// assert.PanicsWithError(t, "crazy error", func(){ GoCrazy() }) +func PanicsWithError(t TestingT, errString string, f assert.PanicTestFunc, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.PanicsWithError(t, errString, f, msgAndArgs...) { + return + } + t.FailNow() +} + +// PanicsWithErrorf asserts that the code inside the specified PanicTestFunc +// panics, and that the recovered panic value is an error that satisfies the +// EqualError comparison. +// +// assert.PanicsWithErrorf(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func PanicsWithErrorf(t TestingT, errString string, f assert.PanicTestFunc, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.PanicsWithErrorf(t, errString, f, msg, args...) { + return + } t.FailNow() } @@ -1060,12 +1354,12 @@ func Panics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { // // assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) func PanicsWithValue(t TestingT, expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) { - if assert.PanicsWithValue(t, expected, f, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.PanicsWithValue(t, expected, f, msgAndArgs...) 
{ + return + } t.FailNow() } @@ -1074,12 +1368,12 @@ func PanicsWithValue(t TestingT, expected interface{}, f assert.PanicTestFunc, m // // assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func PanicsWithValuef(t TestingT, expected interface{}, f assert.PanicTestFunc, msg string, args ...interface{}) { - if assert.PanicsWithValuef(t, expected, f, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.PanicsWithValuef(t, expected, f, msg, args...) { + return + } t.FailNow() } @@ -1087,12 +1381,12 @@ func PanicsWithValuef(t TestingT, expected interface{}, f assert.PanicTestFunc, // // assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") func Panicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) { - if assert.Panicsf(t, f, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Panicsf(t, f, msg, args...) { + return + } t.FailNow() } @@ -1101,12 +1395,12 @@ func Panicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{} // assert.Regexp(t, regexp.MustCompile("start"), "it's starting") // assert.Regexp(t, "start...$", "it's not starting") func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) { - if assert.Regexp(t, rx, str, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Regexp(t, rx, str, msgAndArgs...) { + return + } t.FailNow() } @@ -1115,12 +1409,44 @@ func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface // assert.Regexpf(t, regexp.MustCompile("start", "error message %s", "formatted"), "it's starting") // assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } if assert.Regexpf(t, rx, str, msg, args...) { return } + t.FailNow() +} + +// Same asserts that two pointers reference the same object. +// +// assert.Same(t, ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func Same(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Same(t, expected, actual, msgAndArgs...) { + return + } + t.FailNow() +} + +// Samef asserts that two pointers reference the same object. +// +// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func Samef(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.Samef(t, expected, actual, msg, args...) { + return + } t.FailNow() } @@ -1129,12 +1455,12 @@ func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...in // // assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { - if assert.Subset(t, list, subset, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Subset(t, list, subset, msgAndArgs...) 
{ + return + } t.FailNow() } @@ -1143,12 +1469,12 @@ func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...inte // // assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { - if assert.Subsetf(t, list, subset, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Subsetf(t, list, subset, msg, args...) { + return + } t.FailNow() } @@ -1156,12 +1482,12 @@ func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args // // assert.True(t, myBool) func True(t TestingT, value bool, msgAndArgs ...interface{}) { - if assert.True(t, value, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.True(t, value, msgAndArgs...) { + return + } t.FailNow() } @@ -1169,12 +1495,12 @@ func True(t TestingT, value bool, msgAndArgs ...interface{}) { // // assert.Truef(t, myBool, "error message %s", "formatted") func Truef(t TestingT, value bool, msg string, args ...interface{}) { - if assert.Truef(t, value, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Truef(t, value, msg, args...) { + return + } t.FailNow() } @@ -1182,12 +1508,12 @@ func Truef(t TestingT, value bool, msg string, args ...interface{}) { // // assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) func WithinDuration(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) { - if assert.WithinDuration(t, expected, actual, delta, msgAndArgs...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.WithinDuration(t, expected, actual, delta, msgAndArgs...) { + return + } t.FailNow() } @@ -1195,33 +1521,55 @@ func WithinDuration(t TestingT, expected time.Time, actual time.Time, delta time // // assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } if assert.WithinDurationf(t, expected, actual, delta, msg, args...) { return } + t.FailNow() +} + +// YAMLEq asserts that two YAML strings are equivalent. +func YAMLEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.YAMLEq(t, expected, actual, msgAndArgs...) { + return + } t.FailNow() } -// Zero asserts that i is the zero value for its type. -func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) { - if assert.Zero(t, i, msgAndArgs...) { +// YAMLEqf asserts that two YAML strings are equivalent. +func YAMLEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.YAMLEqf(t, expected, actual, msg, args...) { return } + t.FailNow() +} + +// Zero asserts that i is the zero value for its type. +func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Zero(t, i, msgAndArgs...) { + return + } t.FailNow() } // Zerof asserts that i is the zero value for its type. func Zerof(t TestingT, i interface{}, msg string, args ...interface{}) { - if assert.Zerof(t, i, msg, args...) { - return - } if h, ok := t.(tHelper); ok { h.Helper() } + if assert.Zerof(t, i, msg, args...) 
{ + return + } t.FailNow() } diff --git a/vendor/github.com/stretchr/testify/require/require_forward.go b/vendor/github.com/stretchr/testify/require/require_forward.go index 9fe41dbdc0c..5aac226df83 100644 --- a/vendor/github.com/stretchr/testify/require/require_forward.go +++ b/vendor/github.com/stretchr/testify/require/require_forward.go @@ -54,7 +54,8 @@ func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, Containsf(a.t, s, contains, msg, args...) } -// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +// DirExists checks whether a directory exists in the given path. It also fails +// if the path is a file rather a directory or there is an error checking whether it exists. func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -62,7 +63,8 @@ func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) { DirExists(a.t, path, msgAndArgs...) } -// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +// DirExistsf checks whether a directory exists in the given path. It also fails +// if the path is a file rather a directory or there is an error checking whether it exists. func (a *Assertions) DirExistsf(path string, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -216,6 +218,28 @@ func (a *Assertions) Errorf(err error, msg string, args ...interface{}) { Errorf(a.t, err, msg, args...) } +// Eventually asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. +// +// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) +func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Eventually(a.t, condition, waitFor, tick, msgAndArgs...) +} + +// Eventuallyf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. +// +// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Eventuallyf(a.t, condition, waitFor, tick, msg, args...) +} + // Exactly asserts that two objects are equal in value and type. // // a.Exactly(int32(123), int64(123)) @@ -288,7 +312,8 @@ func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) { Falsef(a.t, value, msg, args...) } -// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +// FileExists checks whether a file exists in the given path. It also fails if +// the path points to a directory or there is an error when trying to check the file. func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -296,7 +321,8 @@ func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) { FileExists(a.t, path, msgAndArgs...) } -// FileExistsf checks whether a file exists in the given path. 
It also fails if the path points to a directory or there is an error when trying to check the file. +// FileExistsf checks whether a file exists in the given path. It also fails if +// the path points to a directory or there is an error when trying to check the file. func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -304,6 +330,56 @@ func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) { FileExistsf(a.t, path, msg, args...) } +// Greater asserts that the first element is greater than the second +// +// a.Greater(2, 1) +// a.Greater(float64(2), float64(1)) +// a.Greater("b", "a") +func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Greater(a.t, e1, e2, msgAndArgs...) +} + +// GreaterOrEqual asserts that the first element is greater than or equal to the second +// +// a.GreaterOrEqual(2, 1) +// a.GreaterOrEqual(2, 2) +// a.GreaterOrEqual("b", "a") +// a.GreaterOrEqual("b", "b") +func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + GreaterOrEqual(a.t, e1, e2, msgAndArgs...) +} + +// GreaterOrEqualf asserts that the first element is greater than or equal to the second +// +// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") +// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") +// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") +// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") +func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + GreaterOrEqualf(a.t, e1, e2, msg, args...) +} + +// Greaterf asserts that the first element is greater than the second +// +// a.Greaterf(2, 1, "error message %s", "formatted") +// a.Greaterf(float64(2, "error message %s", "formatted"), float64(1)) +// a.Greaterf("b", "a", "error message %s", "formatted") +func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Greaterf(a.t, e1, e2, msg, args...) +} + // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. // @@ -450,7 +526,7 @@ func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{} // InDelta asserts that the two numerals are within delta of each other. // -// a.InDelta(math.Pi, (22 / 7.0), 0.01) +// a.InDelta(math.Pi, 22/7.0, 0.01) func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -492,7 +568,7 @@ func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, del // InDeltaf asserts that the two numerals are within delta of each other. // -// a.InDeltaf(math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +// a.InDeltaf(math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -590,6 +666,78 @@ func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...in Lenf(a.t, object, length, msg, args...) 
} +// Less asserts that the first element is less than the second +// +// a.Less(1, 2) +// a.Less(float64(1), float64(2)) +// a.Less("a", "b") +func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Less(a.t, e1, e2, msgAndArgs...) +} + +// LessOrEqual asserts that the first element is less than or equal to the second +// +// a.LessOrEqual(1, 2) +// a.LessOrEqual(2, 2) +// a.LessOrEqual("a", "b") +// a.LessOrEqual("b", "b") +func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + LessOrEqual(a.t, e1, e2, msgAndArgs...) +} + +// LessOrEqualf asserts that the first element is less than or equal to the second +// +// a.LessOrEqualf(1, 2, "error message %s", "formatted") +// a.LessOrEqualf(2, 2, "error message %s", "formatted") +// a.LessOrEqualf("a", "b", "error message %s", "formatted") +// a.LessOrEqualf("b", "b", "error message %s", "formatted") +func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + LessOrEqualf(a.t, e1, e2, msg, args...) +} + +// Lessf asserts that the first element is less than the second +// +// a.Lessf(1, 2, "error message %s", "formatted") +// a.Lessf(float64(1, "error message %s", "formatted"), float64(2)) +// a.Lessf("a", "b", "error message %s", "formatted") +func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Lessf(a.t, e1, e2, msg, args...) +} + +// Never asserts that the given condition doesn't satisfy in waitFor time, +// periodically checking the target function each tick. +// +// a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond) +func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Never(a.t, condition, waitFor, tick, msgAndArgs...) +} + +// Neverf asserts that the given condition doesn't satisfy in waitFor time, +// periodically checking the target function each tick. +// +// a.Neverf(func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func (a *Assertions) Neverf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Neverf(a.t, condition, waitFor, tick, msg, args...) +} + // Nil asserts that the specified object is nil. // // a.Nil(err) @@ -610,6 +758,24 @@ func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) { Nilf(a.t, object, msg, args...) } +// NoDirExists checks whether a directory does not exist in the given path. +// It fails if the path points to an existing _directory_ only. +func (a *Assertions) NoDirExists(path string, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + NoDirExists(a.t, path, msgAndArgs...) +} + +// NoDirExistsf checks whether a directory does not exist in the given path. +// It fails if the path points to an existing _directory_ only. +func (a *Assertions) NoDirExistsf(path string, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + NoDirExistsf(a.t, path, msg, args...) +} + // NoError asserts that a function returned no error (i.e. `nil`). 
// // actualObj, err := SomeFunction() @@ -636,6 +802,24 @@ func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) { NoErrorf(a.t, err, msg, args...) } +// NoFileExists checks whether a file does not exist in a given path. It fails +// if the path points to an existing _file_ only. +func (a *Assertions) NoFileExists(path string, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + NoFileExists(a.t, path, msgAndArgs...) +} + +// NoFileExistsf checks whether a file does not exist in a given path. It fails +// if the path points to an existing _file_ only. +func (a *Assertions) NoFileExistsf(path string, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + NoFileExistsf(a.t, path, msg, args...) +} + // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // @@ -776,6 +960,32 @@ func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, arg NotRegexpf(a.t, rx, str, msg, args...) } +// NotSame asserts that two pointers do not reference the same object. +// +// a.NotSame(ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) NotSame(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + NotSame(a.t, expected, actual, msgAndArgs...) +} + +// NotSamef asserts that two pointers do not reference the same object. +// +// a.NotSamef(ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + NotSamef(a.t, expected, actual, msg, args...) +} + // NotSubset asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // @@ -824,6 +1034,30 @@ func (a *Assertions) Panics(f assert.PanicTestFunc, msgAndArgs ...interface{}) { Panics(a.t, f, msgAndArgs...) } +// PanicsWithError asserts that the code inside the specified PanicTestFunc +// panics, and that the recovered panic value is an error that satisfies the +// EqualError comparison. +// +// a.PanicsWithError("crazy error", func(){ GoCrazy() }) +func (a *Assertions) PanicsWithError(errString string, f assert.PanicTestFunc, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + PanicsWithError(a.t, errString, f, msgAndArgs...) +} + +// PanicsWithErrorf asserts that the code inside the specified PanicTestFunc +// panics, and that the recovered panic value is an error that satisfies the +// EqualError comparison. +// +// a.PanicsWithErrorf("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func (a *Assertions) PanicsWithErrorf(errString string, f assert.PanicTestFunc, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + PanicsWithErrorf(a.t, errString, f, msg, args...) +} + // PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // @@ -878,6 +1112,32 @@ func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args . Regexpf(a.t, rx, str, msg, args...) 
} +// Same asserts that two pointers reference the same object. +// +// a.Same(ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) Same(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Same(a.t, expected, actual, msgAndArgs...) +} + +// Samef asserts that two pointers reference the same object. +// +// a.Samef(ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + Samef(a.t, expected, actual, msg, args...) +} + // Subset asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // @@ -940,6 +1200,22 @@ func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta WithinDurationf(a.t, expected, actual, delta, msg, args...) } +// YAMLEq asserts that two YAML strings are equivalent. +func (a *Assertions) YAMLEq(expected string, actual string, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + YAMLEq(a.t, expected, actual, msgAndArgs...) +} + +// YAMLEqf asserts that two YAML strings are equivalent. +func (a *Assertions) YAMLEqf(expected string, actual string, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + YAMLEqf(a.t, expected, actual, msg, args...) +} + // Zero asserts that i is the zero value for its type. func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { diff --git a/vendor/github.com/stretchr/testify/require/requirements.go b/vendor/github.com/stretchr/testify/require/requirements.go index 690583a8e03..91772dfeb91 100644 --- a/vendor/github.com/stretchr/testify/require/requirements.go +++ b/vendor/github.com/stretchr/testify/require/requirements.go @@ -22,8 +22,8 @@ type ValueAssertionFunc func(TestingT, interface{}, ...interface{}) // for table driven tests. type BoolAssertionFunc func(TestingT, bool, ...interface{}) -// ValuesAssertionFunc is a common function prototype when validating an error value. Can be useful +// ErrorAssertionFunc is a common function prototype when validating an error value. Can be useful // for table driven tests. type ErrorAssertionFunc func(TestingT, error, ...interface{}) -//go:generate go run ../_codegen/main.go -output-package=require -template=require.go.tmpl -include-format-funcs +//go:generate sh -c "cd ../_codegen && go build && cd - && ../_codegen/_codegen -output-package=require -template=require.go.tmpl -include-format-funcs" diff --git a/vendor/google.golang.org/grpc/CONTRIBUTING.md b/vendor/google.golang.org/grpc/CONTRIBUTING.md index 4f1567e2f95..cd03f8c7688 100644 --- a/vendor/google.golang.org/grpc/CONTRIBUTING.md +++ b/vendor/google.golang.org/grpc/CONTRIBUTING.md @@ -57,6 +57,5 @@ How to get your contributions merged smoothly and quickly. - `make vet` to catch vet errors - `make test` to run the tests - `make testrace` to run tests in race mode - - optional `make testappengine` to run tests with appengine - Exceptions to the rules can be made if there's a compelling reason for doing so. 
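Note (not part of the patch): the testify hunks above pull in several require helpers that k6 did not have before this bump, e.g. Eventually, Same, YAMLEq and PanicsWithError. The following is a minimal, hypothetical Go test sketching how they could be called once this update lands; the package name, test name and all values are illustrative assumptions, not code from this repository or from the vendored diff.

package example_test

import (
	"errors"
	"testing"
	"time"

	"github.com/stretchr/testify/require"
)

// Illustrative only; demonstrates the new require helpers added by this
// testify bump, it is not part of the vendored changes.
func TestNewRequireHelpers(t *testing.T) {
	// Eventually polls the condition every tick until it returns true or
	// waitFor expires, failing the test on timeout.
	done := make(chan struct{})
	go func() {
		time.Sleep(50 * time.Millisecond)
		close(done)
	}()
	require.Eventually(t, func() bool {
		select {
		case <-done:
			return true
		default:
			return false
		}
	}, time.Second, 10*time.Millisecond)

	// Same asserts pointer identity (same type and same address), which is
	// stricter than Equal's value comparison.
	v := 42
	p := &v
	require.Same(t, p, p)

	// YAMLEq compares two YAML documents structurally, so key order and
	// formatting differences do not matter.
	require.YAMLEq(t, "a: 1\nb: [2, 3]\n", "b: [2, 3]\na: 1\n")

	// PanicsWithError requires that the recovered panic value is an error
	// whose message equals the given string (EqualError semantics).
	require.PanicsWithError(t, "boom", func() {
		panic(errors.New("boom"))
	})
}

Of these, Eventually is probably the most relevant to this codebase, since it can replace hand-rolled sleep-and-poll loops in tests; the rest are listed only to show what the bump makes available.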
diff --git a/vendor/google.golang.org/grpc/README.md b/vendor/google.golang.org/grpc/README.md index fef78e4a1df..3949a683fb5 100644 --- a/vendor/google.golang.org/grpc/README.md +++ b/vendor/google.golang.org/grpc/README.md @@ -1,7 +1,7 @@ # gRPC-Go [![Build Status](https://travis-ci.org/grpc/grpc-go.svg)](https://travis-ci.org/grpc/grpc-go) -[![GoDoc](https://godoc.org/google.golang.org/grpc?status.svg)][API] +[![GoDoc](https://pkg.go.dev/badge/google.golang.org/grpc)][API] [![GoReportCard](https://goreportcard.com/badge/grpc/grpc-go)](https://goreportcard.com/report/github.com/grpc/grpc-go) The [Go][] implementation of [gRPC][]: A high performance, open source, general @@ -131,7 +131,7 @@ the root cause of the connection being closed is on the server side. Turn on logging on __both client and server__, and see if there are any transport errors. -[API]: https://grpc.io/docs/languages/go/api +[API]: https://pkg.go.dev/google.golang.org/grpc [Go]: https://golang.org [Go module]: https://github.com/golang/go/wiki/Modules [gRPC]: https://grpc.io diff --git a/vendor/google.golang.org/grpc/SECURITY.md b/vendor/google.golang.org/grpc/SECURITY.md new file mode 100644 index 00000000000..be6e108705c --- /dev/null +++ b/vendor/google.golang.org/grpc/SECURITY.md @@ -0,0 +1,3 @@ +# Security Policy + +For information on gRPC Security Policy and reporting potentional security issues, please see [gRPC CVE Process](https://github.com/grpc/proposal/blob/master/P4-grpc-cve-process.md). diff --git a/vendor/google.golang.org/grpc/attributes/attributes.go b/vendor/google.golang.org/grpc/attributes/attributes.go index ee5c51e6cdb..3220d87be40 100644 --- a/vendor/google.golang.org/grpc/attributes/attributes.go +++ b/vendor/google.golang.org/grpc/attributes/attributes.go @@ -19,7 +19,10 @@ // Package attributes defines a generic key/value store used in various gRPC // components. // -// All APIs in this package are EXPERIMENTAL. +// Experimental +// +// Notice: This package is EXPERIMENTAL and may be changed or removed in a +// later release. package attributes import "fmt" diff --git a/vendor/google.golang.org/grpc/backoff.go b/vendor/google.golang.org/grpc/backoff.go index ff7c3ee6f48..542594f5cc5 100644 --- a/vendor/google.golang.org/grpc/backoff.go +++ b/vendor/google.golang.org/grpc/backoff.go @@ -48,7 +48,10 @@ type BackoffConfig struct { // here for more details: // https://github.com/grpc/grpc/blob/master/doc/connection-backoff.md. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type ConnectParams struct { // Backoff specifies the configuration options for connection backoff. Backoff backoff.Config diff --git a/vendor/google.golang.org/grpc/balancer/balancer.go b/vendor/google.golang.org/grpc/balancer/balancer.go index 8bf359dbfda..788759bde4b 100644 --- a/vendor/google.golang.org/grpc/balancer/balancer.go +++ b/vendor/google.golang.org/grpc/balancer/balancer.go @@ -174,6 +174,10 @@ type BuildOptions struct { Dialer func(context.Context, string) (net.Conn, error) // ChannelzParentID is the entity parent's channelz unique identification number. ChannelzParentID int64 + // CustomUserAgent is the custom user agent set on the parent ClientConn. + // The balancer should set the same custom user agent if it creates a + // ClientConn. + CustomUserAgent string // Target contains the parsed address info of the dial target. It is the same resolver.Target as // passed to the resolver. 
// See the documentation for the resolver.Target type for details about what it contains. diff --git a/vendor/google.golang.org/grpc/balancer/base/balancer.go b/vendor/google.golang.org/grpc/balancer/base/balancer.go index 32d782f1cf5..e0d34288ccf 100644 --- a/vendor/google.golang.org/grpc/balancer/base/balancer.go +++ b/vendor/google.golang.org/grpc/balancer/base/balancer.go @@ -64,7 +64,7 @@ type baseBalancer struct { csEvltr *balancer.ConnectivityStateEvaluator state connectivity.State - subConns map[resolver.Address]balancer.SubConn + subConns map[resolver.Address]balancer.SubConn // `attributes` is stripped from the keys of this map (the addresses) scStates map[balancer.SubConn]connectivity.State picker balancer.Picker config Config @@ -101,17 +101,41 @@ func (b *baseBalancer) UpdateClientConnState(s balancer.ClientConnState) error { // addrsSet is the set converted from addrs, it's used for quick lookup of an address. addrsSet := make(map[resolver.Address]struct{}) for _, a := range s.ResolverState.Addresses { - addrsSet[a] = struct{}{} - if _, ok := b.subConns[a]; !ok { + // Strip attributes from addresses before using them as map keys. So + // that when two addresses only differ in attributes pointers (but with + // the same attribute content), they are considered the same address. + // + // Note that this doesn't handle the case where the attribute content is + // different. So if users want to set different attributes to create + // duplicate connections to the same backend, it doesn't work. This is + // fine for now, because duplicate is done by setting Metadata today. + // + // TODO: read attributes to handle duplicate connections. + aNoAttrs := a + aNoAttrs.Attributes = nil + addrsSet[aNoAttrs] = struct{}{} + if sc, ok := b.subConns[aNoAttrs]; !ok { // a is a new address (not existing in b.subConns). + // + // When creating SubConn, the original address with attributes is + // passed through. So that connection configurations in attributes + // (like creds) will be used. sc, err := b.cc.NewSubConn([]resolver.Address{a}, balancer.NewSubConnOptions{HealthCheckEnabled: b.config.HealthCheck}) if err != nil { logger.Warningf("base.baseBalancer: failed to create new SubConn: %v", err) continue } - b.subConns[a] = sc + b.subConns[aNoAttrs] = sc b.scStates[sc] = connectivity.Idle sc.Connect() + } else { + // Always update the subconn's address in case the attributes + // changed. + // + // The SubConn does a reflect.DeepEqual of the new and old + // addresses. So this is a noop if the current address is the same + // as the old one (including attributes). + sc.UpdateAddresses([]resolver.Address{a}) } } for a, sc := range b.subConns { diff --git a/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go b/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go index f826ec76984..ed75290cdf3 100644 --- a/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go +++ b/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go @@ -1,26 +1,49 @@ +// Copyright 2018 The gRPC Authors +// All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// The canonical version of this proto can be found at +// https://github.com/grpc/grpc-proto/blob/master/grpc/binlog/v1/binarylog.proto + // Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.25.0 +// protoc v3.14.0 // source: grpc/binlog/v1/binarylog.proto package grpc_binarylog_v1 import ( - fmt "fmt" proto "github.com/golang/protobuf/proto" - duration "github.com/golang/protobuf/ptypes/duration" - timestamp "github.com/golang/protobuf/ptypes/timestamp" - math "math" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + durationpb "google.golang.org/protobuf/types/known/durationpb" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" + sync "sync" ) -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package +// This is a compile-time assertion that a sufficiently up-to-date version +// of the legacy proto package is being used. +const _ = proto.ProtoPackageIsVersion4 // Enumerates the type of event // Note the terminology is different from the RPC semantics @@ -56,34 +79,55 @@ const ( GrpcLogEntry_EVENT_TYPE_CANCEL GrpcLogEntry_EventType = 7 ) -var GrpcLogEntry_EventType_name = map[int32]string{ - 0: "EVENT_TYPE_UNKNOWN", - 1: "EVENT_TYPE_CLIENT_HEADER", - 2: "EVENT_TYPE_SERVER_HEADER", - 3: "EVENT_TYPE_CLIENT_MESSAGE", - 4: "EVENT_TYPE_SERVER_MESSAGE", - 5: "EVENT_TYPE_CLIENT_HALF_CLOSE", - 6: "EVENT_TYPE_SERVER_TRAILER", - 7: "EVENT_TYPE_CANCEL", -} +// Enum value maps for GrpcLogEntry_EventType. 
+var ( + GrpcLogEntry_EventType_name = map[int32]string{ + 0: "EVENT_TYPE_UNKNOWN", + 1: "EVENT_TYPE_CLIENT_HEADER", + 2: "EVENT_TYPE_SERVER_HEADER", + 3: "EVENT_TYPE_CLIENT_MESSAGE", + 4: "EVENT_TYPE_SERVER_MESSAGE", + 5: "EVENT_TYPE_CLIENT_HALF_CLOSE", + 6: "EVENT_TYPE_SERVER_TRAILER", + 7: "EVENT_TYPE_CANCEL", + } + GrpcLogEntry_EventType_value = map[string]int32{ + "EVENT_TYPE_UNKNOWN": 0, + "EVENT_TYPE_CLIENT_HEADER": 1, + "EVENT_TYPE_SERVER_HEADER": 2, + "EVENT_TYPE_CLIENT_MESSAGE": 3, + "EVENT_TYPE_SERVER_MESSAGE": 4, + "EVENT_TYPE_CLIENT_HALF_CLOSE": 5, + "EVENT_TYPE_SERVER_TRAILER": 6, + "EVENT_TYPE_CANCEL": 7, + } +) -var GrpcLogEntry_EventType_value = map[string]int32{ - "EVENT_TYPE_UNKNOWN": 0, - "EVENT_TYPE_CLIENT_HEADER": 1, - "EVENT_TYPE_SERVER_HEADER": 2, - "EVENT_TYPE_CLIENT_MESSAGE": 3, - "EVENT_TYPE_SERVER_MESSAGE": 4, - "EVENT_TYPE_CLIENT_HALF_CLOSE": 5, - "EVENT_TYPE_SERVER_TRAILER": 6, - "EVENT_TYPE_CANCEL": 7, +func (x GrpcLogEntry_EventType) Enum() *GrpcLogEntry_EventType { + p := new(GrpcLogEntry_EventType) + *p = x + return p } func (x GrpcLogEntry_EventType) String() string { - return proto.EnumName(GrpcLogEntry_EventType_name, int32(x)) + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (GrpcLogEntry_EventType) Descriptor() protoreflect.EnumDescriptor { + return file_grpc_binlog_v1_binarylog_proto_enumTypes[0].Descriptor() +} + +func (GrpcLogEntry_EventType) Type() protoreflect.EnumType { + return &file_grpc_binlog_v1_binarylog_proto_enumTypes[0] +} + +func (x GrpcLogEntry_EventType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) } +// Deprecated: Use GrpcLogEntry_EventType.Descriptor instead. func (GrpcLogEntry_EventType) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{0, 0} + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{0, 0} } // Enumerates the entity that generates the log entry @@ -95,24 +139,45 @@ const ( GrpcLogEntry_LOGGER_SERVER GrpcLogEntry_Logger = 2 ) -var GrpcLogEntry_Logger_name = map[int32]string{ - 0: "LOGGER_UNKNOWN", - 1: "LOGGER_CLIENT", - 2: "LOGGER_SERVER", -} +// Enum value maps for GrpcLogEntry_Logger. +var ( + GrpcLogEntry_Logger_name = map[int32]string{ + 0: "LOGGER_UNKNOWN", + 1: "LOGGER_CLIENT", + 2: "LOGGER_SERVER", + } + GrpcLogEntry_Logger_value = map[string]int32{ + "LOGGER_UNKNOWN": 0, + "LOGGER_CLIENT": 1, + "LOGGER_SERVER": 2, + } +) -var GrpcLogEntry_Logger_value = map[string]int32{ - "LOGGER_UNKNOWN": 0, - "LOGGER_CLIENT": 1, - "LOGGER_SERVER": 2, +func (x GrpcLogEntry_Logger) Enum() *GrpcLogEntry_Logger { + p := new(GrpcLogEntry_Logger) + *p = x + return p } func (x GrpcLogEntry_Logger) String() string { - return proto.EnumName(GrpcLogEntry_Logger_name, int32(x)) + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } +func (GrpcLogEntry_Logger) Descriptor() protoreflect.EnumDescriptor { + return file_grpc_binlog_v1_binarylog_proto_enumTypes[1].Descriptor() +} + +func (GrpcLogEntry_Logger) Type() protoreflect.EnumType { + return &file_grpc_binlog_v1_binarylog_proto_enumTypes[1] +} + +func (x GrpcLogEntry_Logger) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use GrpcLogEntry_Logger.Descriptor instead. 
func (GrpcLogEntry_Logger) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{0, 1} + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{0, 1} } type Address_Type int32 @@ -128,32 +193,57 @@ const ( Address_TYPE_UNIX Address_Type = 3 ) -var Address_Type_name = map[int32]string{ - 0: "TYPE_UNKNOWN", - 1: "TYPE_IPV4", - 2: "TYPE_IPV6", - 3: "TYPE_UNIX", -} +// Enum value maps for Address_Type. +var ( + Address_Type_name = map[int32]string{ + 0: "TYPE_UNKNOWN", + 1: "TYPE_IPV4", + 2: "TYPE_IPV6", + 3: "TYPE_UNIX", + } + Address_Type_value = map[string]int32{ + "TYPE_UNKNOWN": 0, + "TYPE_IPV4": 1, + "TYPE_IPV6": 2, + "TYPE_UNIX": 3, + } +) -var Address_Type_value = map[string]int32{ - "TYPE_UNKNOWN": 0, - "TYPE_IPV4": 1, - "TYPE_IPV6": 2, - "TYPE_UNIX": 3, +func (x Address_Type) Enum() *Address_Type { + p := new(Address_Type) + *p = x + return p } func (x Address_Type) String() string { - return proto.EnumName(Address_Type_name, int32(x)) + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Address_Type) Descriptor() protoreflect.EnumDescriptor { + return file_grpc_binlog_v1_binarylog_proto_enumTypes[2].Descriptor() +} + +func (Address_Type) Type() protoreflect.EnumType { + return &file_grpc_binlog_v1_binarylog_proto_enumTypes[2] } +func (x Address_Type) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Address_Type.Descriptor instead. func (Address_Type) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{7, 0} + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{7, 0} } // Log entry we store in binary logs type GrpcLogEntry struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // The timestamp of the binary log message - Timestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + Timestamp *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"` // Uniquely identifies a call. The value must not be 0 in order to disambiguate // from an unset value. // Each call may have several log entries, they will all have the same call_id. @@ -166,11 +256,11 @@ type GrpcLogEntry struct { // durability or ordering is not guaranteed. SequenceIdWithinCall uint64 `protobuf:"varint,3,opt,name=sequence_id_within_call,json=sequenceIdWithinCall,proto3" json:"sequence_id_within_call,omitempty"` Type GrpcLogEntry_EventType `protobuf:"varint,4,opt,name=type,proto3,enum=grpc.binarylog.v1.GrpcLogEntry_EventType" json:"type,omitempty"` - Logger GrpcLogEntry_Logger `protobuf:"varint,5,opt,name=logger,proto3,enum=grpc.binarylog.v1.GrpcLogEntry_Logger" json:"logger,omitempty"` + Logger GrpcLogEntry_Logger `protobuf:"varint,5,opt,name=logger,proto3,enum=grpc.binarylog.v1.GrpcLogEntry_Logger" json:"logger,omitempty"` // One of the above Logger enum // The logger uses one of the following fields to record the payload, // according to the type of the log entry. // - // Types that are valid to be assigned to Payload: + // Types that are assignable to Payload: // *GrpcLogEntry_ClientHeader // *GrpcLogEntry_ServerHeader // *GrpcLogEntry_Message @@ -183,100 +273,76 @@ type GrpcLogEntry struct { // EVENT_TYPE_SERVER_HEADER normally or EVENT_TYPE_SERVER_TRAILER in // the case of trailers-only. On server side, peer is always // logged on EVENT_TYPE_CLIENT_HEADER. 
- Peer *Address `protobuf:"bytes,11,opt,name=peer,proto3" json:"peer,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Peer *Address `protobuf:"bytes,11,opt,name=peer,proto3" json:"peer,omitempty"` } -func (m *GrpcLogEntry) Reset() { *m = GrpcLogEntry{} } -func (m *GrpcLogEntry) String() string { return proto.CompactTextString(m) } -func (*GrpcLogEntry) ProtoMessage() {} -func (*GrpcLogEntry) Descriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{0} +func (x *GrpcLogEntry) Reset() { + *x = GrpcLogEntry{} + if protoimpl.UnsafeEnabled { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *GrpcLogEntry) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GrpcLogEntry.Unmarshal(m, b) -} -func (m *GrpcLogEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GrpcLogEntry.Marshal(b, m, deterministic) -} -func (m *GrpcLogEntry) XXX_Merge(src proto.Message) { - xxx_messageInfo_GrpcLogEntry.Merge(m, src) +func (x *GrpcLogEntry) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *GrpcLogEntry) XXX_Size() int { - return xxx_messageInfo_GrpcLogEntry.Size(m) -} -func (m *GrpcLogEntry) XXX_DiscardUnknown() { - xxx_messageInfo_GrpcLogEntry.DiscardUnknown(m) + +func (*GrpcLogEntry) ProtoMessage() {} + +func (x *GrpcLogEntry) ProtoReflect() protoreflect.Message { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_GrpcLogEntry proto.InternalMessageInfo +// Deprecated: Use GrpcLogEntry.ProtoReflect.Descriptor instead. 
+func (*GrpcLogEntry) Descriptor() ([]byte, []int) { + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{0} +} -func (m *GrpcLogEntry) GetTimestamp() *timestamp.Timestamp { - if m != nil { - return m.Timestamp +func (x *GrpcLogEntry) GetTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.Timestamp } return nil } -func (m *GrpcLogEntry) GetCallId() uint64 { - if m != nil { - return m.CallId +func (x *GrpcLogEntry) GetCallId() uint64 { + if x != nil { + return x.CallId } return 0 } -func (m *GrpcLogEntry) GetSequenceIdWithinCall() uint64 { - if m != nil { - return m.SequenceIdWithinCall +func (x *GrpcLogEntry) GetSequenceIdWithinCall() uint64 { + if x != nil { + return x.SequenceIdWithinCall } return 0 } -func (m *GrpcLogEntry) GetType() GrpcLogEntry_EventType { - if m != nil { - return m.Type +func (x *GrpcLogEntry) GetType() GrpcLogEntry_EventType { + if x != nil { + return x.Type } return GrpcLogEntry_EVENT_TYPE_UNKNOWN } -func (m *GrpcLogEntry) GetLogger() GrpcLogEntry_Logger { - if m != nil { - return m.Logger +func (x *GrpcLogEntry) GetLogger() GrpcLogEntry_Logger { + if x != nil { + return x.Logger } return GrpcLogEntry_LOGGER_UNKNOWN } -type isGrpcLogEntry_Payload interface { - isGrpcLogEntry_Payload() -} - -type GrpcLogEntry_ClientHeader struct { - ClientHeader *ClientHeader `protobuf:"bytes,6,opt,name=client_header,json=clientHeader,proto3,oneof"` -} - -type GrpcLogEntry_ServerHeader struct { - ServerHeader *ServerHeader `protobuf:"bytes,7,opt,name=server_header,json=serverHeader,proto3,oneof"` -} - -type GrpcLogEntry_Message struct { - Message *Message `protobuf:"bytes,8,opt,name=message,proto3,oneof"` -} - -type GrpcLogEntry_Trailer struct { - Trailer *Trailer `protobuf:"bytes,9,opt,name=trailer,proto3,oneof"` -} - -func (*GrpcLogEntry_ClientHeader) isGrpcLogEntry_Payload() {} - -func (*GrpcLogEntry_ServerHeader) isGrpcLogEntry_Payload() {} - -func (*GrpcLogEntry_Message) isGrpcLogEntry_Payload() {} - -func (*GrpcLogEntry_Trailer) isGrpcLogEntry_Payload() {} - func (m *GrpcLogEntry) GetPayload() isGrpcLogEntry_Payload { if m != nil { return m.Payload @@ -284,59 +350,82 @@ func (m *GrpcLogEntry) GetPayload() isGrpcLogEntry_Payload { return nil } -func (m *GrpcLogEntry) GetClientHeader() *ClientHeader { - if x, ok := m.GetPayload().(*GrpcLogEntry_ClientHeader); ok { +func (x *GrpcLogEntry) GetClientHeader() *ClientHeader { + if x, ok := x.GetPayload().(*GrpcLogEntry_ClientHeader); ok { return x.ClientHeader } return nil } -func (m *GrpcLogEntry) GetServerHeader() *ServerHeader { - if x, ok := m.GetPayload().(*GrpcLogEntry_ServerHeader); ok { +func (x *GrpcLogEntry) GetServerHeader() *ServerHeader { + if x, ok := x.GetPayload().(*GrpcLogEntry_ServerHeader); ok { return x.ServerHeader } return nil } -func (m *GrpcLogEntry) GetMessage() *Message { - if x, ok := m.GetPayload().(*GrpcLogEntry_Message); ok { +func (x *GrpcLogEntry) GetMessage() *Message { + if x, ok := x.GetPayload().(*GrpcLogEntry_Message); ok { return x.Message } return nil } -func (m *GrpcLogEntry) GetTrailer() *Trailer { - if x, ok := m.GetPayload().(*GrpcLogEntry_Trailer); ok { +func (x *GrpcLogEntry) GetTrailer() *Trailer { + if x, ok := x.GetPayload().(*GrpcLogEntry_Trailer); ok { return x.Trailer } return nil } -func (m *GrpcLogEntry) GetPayloadTruncated() bool { - if m != nil { - return m.PayloadTruncated +func (x *GrpcLogEntry) GetPayloadTruncated() bool { + if x != nil { + return x.PayloadTruncated } return false } -func (m *GrpcLogEntry) GetPeer() *Address { - if m != nil { - return 
m.Peer +func (x *GrpcLogEntry) GetPeer() *Address { + if x != nil { + return x.Peer } return nil } -// XXX_OneofWrappers is for the internal use of the proto package. -func (*GrpcLogEntry) XXX_OneofWrappers() []interface{} { - return []interface{}{ - (*GrpcLogEntry_ClientHeader)(nil), - (*GrpcLogEntry_ServerHeader)(nil), - (*GrpcLogEntry_Message)(nil), - (*GrpcLogEntry_Trailer)(nil), - } +type isGrpcLogEntry_Payload interface { + isGrpcLogEntry_Payload() } +type GrpcLogEntry_ClientHeader struct { + ClientHeader *ClientHeader `protobuf:"bytes,6,opt,name=client_header,json=clientHeader,proto3,oneof"` +} + +type GrpcLogEntry_ServerHeader struct { + ServerHeader *ServerHeader `protobuf:"bytes,7,opt,name=server_header,json=serverHeader,proto3,oneof"` +} + +type GrpcLogEntry_Message struct { + // Used by EVENT_TYPE_CLIENT_MESSAGE, EVENT_TYPE_SERVER_MESSAGE + Message *Message `protobuf:"bytes,8,opt,name=message,proto3,oneof"` +} + +type GrpcLogEntry_Trailer struct { + Trailer *Trailer `protobuf:"bytes,9,opt,name=trailer,proto3,oneof"` +} + +func (*GrpcLogEntry_ClientHeader) isGrpcLogEntry_Payload() {} + +func (*GrpcLogEntry_ServerHeader) isGrpcLogEntry_Payload() {} + +func (*GrpcLogEntry_Message) isGrpcLogEntry_Payload() {} + +func (*GrpcLogEntry_Trailer) isGrpcLogEntry_Payload() {} + type ClientHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // This contains only the metadata from the application. Metadata *Metadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` // The name of the RPC method, which looks something like: @@ -350,106 +439,122 @@ type ClientHeader struct { // or : . Authority string `protobuf:"bytes,3,opt,name=authority,proto3" json:"authority,omitempty"` // the RPC timeout - Timeout *duration.Duration `protobuf:"bytes,4,opt,name=timeout,proto3" json:"timeout,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Timeout *durationpb.Duration `protobuf:"bytes,4,opt,name=timeout,proto3" json:"timeout,omitempty"` } -func (m *ClientHeader) Reset() { *m = ClientHeader{} } -func (m *ClientHeader) String() string { return proto.CompactTextString(m) } -func (*ClientHeader) ProtoMessage() {} -func (*ClientHeader) Descriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{1} +func (x *ClientHeader) Reset() { + *x = ClientHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *ClientHeader) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ClientHeader.Unmarshal(m, b) -} -func (m *ClientHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ClientHeader.Marshal(b, m, deterministic) -} -func (m *ClientHeader) XXX_Merge(src proto.Message) { - xxx_messageInfo_ClientHeader.Merge(m, src) -} -func (m *ClientHeader) XXX_Size() int { - return xxx_messageInfo_ClientHeader.Size(m) +func (x *ClientHeader) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *ClientHeader) XXX_DiscardUnknown() { - xxx_messageInfo_ClientHeader.DiscardUnknown(m) + +func (*ClientHeader) ProtoMessage() {} + +func (x *ClientHeader) ProtoReflect() protoreflect.Message { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if 
ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_ClientHeader proto.InternalMessageInfo +// Deprecated: Use ClientHeader.ProtoReflect.Descriptor instead. +func (*ClientHeader) Descriptor() ([]byte, []int) { + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{1} +} -func (m *ClientHeader) GetMetadata() *Metadata { - if m != nil { - return m.Metadata +func (x *ClientHeader) GetMetadata() *Metadata { + if x != nil { + return x.Metadata } return nil } -func (m *ClientHeader) GetMethodName() string { - if m != nil { - return m.MethodName +func (x *ClientHeader) GetMethodName() string { + if x != nil { + return x.MethodName } return "" } -func (m *ClientHeader) GetAuthority() string { - if m != nil { - return m.Authority +func (x *ClientHeader) GetAuthority() string { + if x != nil { + return x.Authority } return "" } -func (m *ClientHeader) GetTimeout() *duration.Duration { - if m != nil { - return m.Timeout +func (x *ClientHeader) GetTimeout() *durationpb.Duration { + if x != nil { + return x.Timeout } return nil } type ServerHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // This contains only the metadata from the application. - Metadata *Metadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Metadata *Metadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` } -func (m *ServerHeader) Reset() { *m = ServerHeader{} } -func (m *ServerHeader) String() string { return proto.CompactTextString(m) } -func (*ServerHeader) ProtoMessage() {} -func (*ServerHeader) Descriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{2} +func (x *ServerHeader) Reset() { + *x = ServerHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *ServerHeader) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ServerHeader.Unmarshal(m, b) -} -func (m *ServerHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ServerHeader.Marshal(b, m, deterministic) +func (x *ServerHeader) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *ServerHeader) XXX_Merge(src proto.Message) { - xxx_messageInfo_ServerHeader.Merge(m, src) -} -func (m *ServerHeader) XXX_Size() int { - return xxx_messageInfo_ServerHeader.Size(m) -} -func (m *ServerHeader) XXX_DiscardUnknown() { - xxx_messageInfo_ServerHeader.DiscardUnknown(m) + +func (*ServerHeader) ProtoMessage() {} + +func (x *ServerHeader) ProtoReflect() protoreflect.Message { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_ServerHeader proto.InternalMessageInfo +// Deprecated: Use ServerHeader.ProtoReflect.Descriptor instead. 
+func (*ServerHeader) Descriptor() ([]byte, []int) { + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{2} +} -func (m *ServerHeader) GetMetadata() *Metadata { - if m != nil { - return m.Metadata +func (x *ServerHeader) GetMetadata() *Metadata { + if x != nil { + return x.Metadata } return nil } type Trailer struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // This contains only the metadata from the application. Metadata *Metadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` // The gRPC status code. @@ -459,112 +564,124 @@ type Trailer struct { StatusMessage string `protobuf:"bytes,3,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` // The value of the 'grpc-status-details-bin' metadata key. If // present, this is always an encoded 'google.rpc.Status' message. - StatusDetails []byte `protobuf:"bytes,4,opt,name=status_details,json=statusDetails,proto3" json:"status_details,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + StatusDetails []byte `protobuf:"bytes,4,opt,name=status_details,json=statusDetails,proto3" json:"status_details,omitempty"` } -func (m *Trailer) Reset() { *m = Trailer{} } -func (m *Trailer) String() string { return proto.CompactTextString(m) } -func (*Trailer) ProtoMessage() {} -func (*Trailer) Descriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{3} +func (x *Trailer) Reset() { + *x = Trailer{} + if protoimpl.UnsafeEnabled { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Trailer) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Trailer.Unmarshal(m, b) -} -func (m *Trailer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Trailer.Marshal(b, m, deterministic) -} -func (m *Trailer) XXX_Merge(src proto.Message) { - xxx_messageInfo_Trailer.Merge(m, src) -} -func (m *Trailer) XXX_Size() int { - return xxx_messageInfo_Trailer.Size(m) +func (x *Trailer) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Trailer) XXX_DiscardUnknown() { - xxx_messageInfo_Trailer.DiscardUnknown(m) + +func (*Trailer) ProtoMessage() {} + +func (x *Trailer) ProtoReflect() protoreflect.Message { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Trailer proto.InternalMessageInfo +// Deprecated: Use Trailer.ProtoReflect.Descriptor instead. 
+func (*Trailer) Descriptor() ([]byte, []int) { + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{3} +} -func (m *Trailer) GetMetadata() *Metadata { - if m != nil { - return m.Metadata +func (x *Trailer) GetMetadata() *Metadata { + if x != nil { + return x.Metadata } return nil } -func (m *Trailer) GetStatusCode() uint32 { - if m != nil { - return m.StatusCode +func (x *Trailer) GetStatusCode() uint32 { + if x != nil { + return x.StatusCode } return 0 } -func (m *Trailer) GetStatusMessage() string { - if m != nil { - return m.StatusMessage +func (x *Trailer) GetStatusMessage() string { + if x != nil { + return x.StatusMessage } return "" } -func (m *Trailer) GetStatusDetails() []byte { - if m != nil { - return m.StatusDetails +func (x *Trailer) GetStatusDetails() []byte { + if x != nil { + return x.StatusDetails } return nil } // Message payload, used by CLIENT_MESSAGE and SERVER_MESSAGE type Message struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // Length of the message. It may not be the same as the length of the // data field, as the logging payload can be truncated or omitted. Length uint32 `protobuf:"varint,1,opt,name=length,proto3" json:"length,omitempty"` // May be truncated or omitted. - Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` } -func (m *Message) Reset() { *m = Message{} } -func (m *Message) String() string { return proto.CompactTextString(m) } -func (*Message) ProtoMessage() {} -func (*Message) Descriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{4} +func (x *Message) Reset() { + *x = Message{} + if protoimpl.UnsafeEnabled { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Message) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Message.Unmarshal(m, b) -} -func (m *Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Message.Marshal(b, m, deterministic) +func (x *Message) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Message) XXX_Merge(src proto.Message) { - xxx_messageInfo_Message.Merge(m, src) -} -func (m *Message) XXX_Size() int { - return xxx_messageInfo_Message.Size(m) -} -func (m *Message) XXX_DiscardUnknown() { - xxx_messageInfo_Message.DiscardUnknown(m) + +func (*Message) ProtoMessage() {} + +func (x *Message) ProtoReflect() protoreflect.Message { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Message proto.InternalMessageInfo +// Deprecated: Use Message.ProtoReflect.Descriptor instead. 
+func (*Message) Descriptor() ([]byte, []int) { + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{4} +} -func (m *Message) GetLength() uint32 { - if m != nil { - return m.Length +func (x *Message) GetLength() uint32 { + if x != nil { + return x.Length } return 0 } -func (m *Message) GetData() []byte { - if m != nil { - return m.Data +func (x *Message) GetData() []byte { + if x != nil { + return x.Data } return nil } @@ -591,222 +708,480 @@ func (m *Message) GetData() []byte { // header is just a normal metadata key. // The pair will not count towards the size limit. type Metadata struct { - Entry []*MetadataEntry `protobuf:"bytes,1,rep,name=entry,proto3" json:"entry,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *Metadata) Reset() { *m = Metadata{} } -func (m *Metadata) String() string { return proto.CompactTextString(m) } -func (*Metadata) ProtoMessage() {} -func (*Metadata) Descriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{5} + Entry []*MetadataEntry `protobuf:"bytes,1,rep,name=entry,proto3" json:"entry,omitempty"` } -func (m *Metadata) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Metadata.Unmarshal(m, b) -} -func (m *Metadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Metadata.Marshal(b, m, deterministic) -} -func (m *Metadata) XXX_Merge(src proto.Message) { - xxx_messageInfo_Metadata.Merge(m, src) +func (x *Metadata) Reset() { + *x = Metadata{} + if protoimpl.UnsafeEnabled { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Metadata) XXX_Size() int { - return xxx_messageInfo_Metadata.Size(m) + +func (x *Metadata) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Metadata) XXX_DiscardUnknown() { - xxx_messageInfo_Metadata.DiscardUnknown(m) + +func (*Metadata) ProtoMessage() {} + +func (x *Metadata) ProtoReflect() protoreflect.Message { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Metadata proto.InternalMessageInfo +// Deprecated: Use Metadata.ProtoReflect.Descriptor instead. 
+func (*Metadata) Descriptor() ([]byte, []int) { + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{5} +} -func (m *Metadata) GetEntry() []*MetadataEntry { - if m != nil { - return m.Entry +func (x *Metadata) GetEntry() []*MetadataEntry { + if x != nil { + return x.Entry } return nil } // A metadata key value pair type MetadataEntry struct { - Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` - Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *MetadataEntry) Reset() { *m = MetadataEntry{} } -func (m *MetadataEntry) String() string { return proto.CompactTextString(m) } -func (*MetadataEntry) ProtoMessage() {} -func (*MetadataEntry) Descriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{6} + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` } -func (m *MetadataEntry) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_MetadataEntry.Unmarshal(m, b) -} -func (m *MetadataEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_MetadataEntry.Marshal(b, m, deterministic) -} -func (m *MetadataEntry) XXX_Merge(src proto.Message) { - xxx_messageInfo_MetadataEntry.Merge(m, src) +func (x *MetadataEntry) Reset() { + *x = MetadataEntry{} + if protoimpl.UnsafeEnabled { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *MetadataEntry) XXX_Size() int { - return xxx_messageInfo_MetadataEntry.Size(m) + +func (x *MetadataEntry) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *MetadataEntry) XXX_DiscardUnknown() { - xxx_messageInfo_MetadataEntry.DiscardUnknown(m) + +func (*MetadataEntry) ProtoMessage() {} + +func (x *MetadataEntry) ProtoReflect() protoreflect.Message { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_MetadataEntry proto.InternalMessageInfo +// Deprecated: Use MetadataEntry.ProtoReflect.Descriptor instead. 
+func (*MetadataEntry) Descriptor() ([]byte, []int) { + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{6} +} -func (m *MetadataEntry) GetKey() string { - if m != nil { - return m.Key +func (x *MetadataEntry) GetKey() string { + if x != nil { + return x.Key } return "" } -func (m *MetadataEntry) GetValue() []byte { - if m != nil { - return m.Value +func (x *MetadataEntry) GetValue() []byte { + if x != nil { + return x.Value } return nil } // Address information type Address struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + Type Address_Type `protobuf:"varint,1,opt,name=type,proto3,enum=grpc.binarylog.v1.Address_Type" json:"type,omitempty"` Address string `protobuf:"bytes,2,opt,name=address,proto3" json:"address,omitempty"` // only for TYPE_IPV4 and TYPE_IPV6 - IpPort uint32 `protobuf:"varint,3,opt,name=ip_port,json=ipPort,proto3" json:"ip_port,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + IpPort uint32 `protobuf:"varint,3,opt,name=ip_port,json=ipPort,proto3" json:"ip_port,omitempty"` } -func (m *Address) Reset() { *m = Address{} } -func (m *Address) String() string { return proto.CompactTextString(m) } -func (*Address) ProtoMessage() {} -func (*Address) Descriptor() ([]byte, []int) { - return fileDescriptor_b7972e58de45083a, []int{7} +func (x *Address) Reset() { + *x = Address{} + if protoimpl.UnsafeEnabled { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Address) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Address.Unmarshal(m, b) +func (x *Address) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Address) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Address.Marshal(b, m, deterministic) -} -func (m *Address) XXX_Merge(src proto.Message) { - xxx_messageInfo_Address.Merge(m, src) -} -func (m *Address) XXX_Size() int { - return xxx_messageInfo_Address.Size(m) -} -func (m *Address) XXX_DiscardUnknown() { - xxx_messageInfo_Address.DiscardUnknown(m) + +func (*Address) ProtoMessage() {} + +func (x *Address) ProtoReflect() protoreflect.Message { + mi := &file_grpc_binlog_v1_binarylog_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Address proto.InternalMessageInfo +// Deprecated: Use Address.ProtoReflect.Descriptor instead. 
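An aside on the regenerated binarylog code above: the XXX_* plumbing is gone in favour of protoimpl state, but the exported surface is unchanged — getters remain nil-safe and Descriptor is kept only for backward compatibility. A minimal usage sketch against the regenerated package (the metadata key and value are made up):

package main

import (
	"fmt"

	pb "google.golang.org/grpc/binarylog/grpc_binarylog_v1"
)

func main() {
	// Getters on the regenerated messages are nil-safe: calling GetKey on a
	// nil *MetadataEntry returns the zero value instead of panicking.
	var e *pb.MetadataEntry
	fmt.Printf("%q\n", e.GetKey()) // ""

	md := &pb.Metadata{
		Entry: []*pb.MetadataEntry{{Key: "user-agent", Value: []byte("grpc-go/1.36.1")}},
	}
	for _, ent := range md.GetEntry() {
		fmt.Printf("%s=%q\n", ent.GetKey(), ent.GetValue())
	}
}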
+func (*Address) Descriptor() ([]byte, []int) { + return file_grpc_binlog_v1_binarylog_proto_rawDescGZIP(), []int{7} +} -func (m *Address) GetType() Address_Type { - if m != nil { - return m.Type +func (x *Address) GetType() Address_Type { + if x != nil { + return x.Type } return Address_TYPE_UNKNOWN } -func (m *Address) GetAddress() string { - if m != nil { - return m.Address +func (x *Address) GetAddress() string { + if x != nil { + return x.Address } return "" } -func (m *Address) GetIpPort() uint32 { - if m != nil { - return m.IpPort +func (x *Address) GetIpPort() uint32 { + if x != nil { + return x.IpPort } return 0 } -func init() { - proto.RegisterEnum("grpc.binarylog.v1.GrpcLogEntry_EventType", GrpcLogEntry_EventType_name, GrpcLogEntry_EventType_value) - proto.RegisterEnum("grpc.binarylog.v1.GrpcLogEntry_Logger", GrpcLogEntry_Logger_name, GrpcLogEntry_Logger_value) - proto.RegisterEnum("grpc.binarylog.v1.Address_Type", Address_Type_name, Address_Type_value) - proto.RegisterType((*GrpcLogEntry)(nil), "grpc.binarylog.v1.GrpcLogEntry") - proto.RegisterType((*ClientHeader)(nil), "grpc.binarylog.v1.ClientHeader") - proto.RegisterType((*ServerHeader)(nil), "grpc.binarylog.v1.ServerHeader") - proto.RegisterType((*Trailer)(nil), "grpc.binarylog.v1.Trailer") - proto.RegisterType((*Message)(nil), "grpc.binarylog.v1.Message") - proto.RegisterType((*Metadata)(nil), "grpc.binarylog.v1.Metadata") - proto.RegisterType((*MetadataEntry)(nil), "grpc.binarylog.v1.MetadataEntry") - proto.RegisterType((*Address)(nil), "grpc.binarylog.v1.Address") -} - -func init() { proto.RegisterFile("grpc/binlog/v1/binarylog.proto", fileDescriptor_b7972e58de45083a) } - -var fileDescriptor_b7972e58de45083a = []byte{ - // 904 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0x51, 0x6f, 0xe3, 0x44, - 0x10, 0xae, 0xdb, 0x34, 0x6e, 0x26, 0x49, 0xe5, 0xae, 0xca, 0x9d, 0xaf, 0x94, 0x6b, 0x64, 0x09, - 0x14, 0x84, 0xe4, 0xa8, 0x29, 0xd7, 0xe3, 0x05, 0xa4, 0x24, 0xf5, 0xa5, 0x11, 0xb9, 0x34, 0xda, - 0xe4, 0x7a, 0x80, 0x90, 0xac, 0x6d, 0xbc, 0x38, 0x16, 0x8e, 0xd7, 0xac, 0x37, 0x41, 0xf9, 0x59, - 0xbc, 0x21, 0xdd, 0xef, 0xe2, 0x1d, 0x79, 0xd7, 0x4e, 0x4d, 0xd3, 0x82, 0xc4, 0xbd, 0xed, 0x7c, - 0xf3, 0xcd, 0x37, 0xbb, 0xe3, 0x99, 0x31, 0xbc, 0xf4, 0x79, 0x3c, 0x6b, 0xdd, 0x05, 0x51, 0xc8, - 0xfc, 0xd6, 0xea, 0x3c, 0x3d, 0x11, 0xbe, 0x0e, 0x99, 0x6f, 0xc7, 0x9c, 0x09, 0x86, 0x8e, 0x52, - 0xbf, 0x7d, 0x8f, 0xae, 0xce, 0x4f, 0x5e, 0xfa, 0x8c, 0xf9, 0x21, 0x6d, 0x49, 0xc2, 0xdd, 0xf2, - 0x97, 0x96, 0xb7, 0xe4, 0x44, 0x04, 0x2c, 0x52, 0x21, 0x27, 0x67, 0x0f, 0xfd, 0x22, 0x58, 0xd0, - 0x44, 0x90, 0x45, 0xac, 0x08, 0xd6, 0x07, 0x1d, 0x6a, 0x7d, 0x1e, 0xcf, 0x86, 0xcc, 0x77, 0x22, - 0xc1, 0xd7, 0xe8, 0x1b, 0xa8, 0x6c, 0x38, 0xa6, 0xd6, 0xd0, 0x9a, 0xd5, 0xf6, 0x89, 0xad, 0x54, - 0xec, 0x5c, 0xc5, 0x9e, 0xe6, 0x0c, 0x7c, 0x4f, 0x46, 0xcf, 0x41, 0x9f, 0x91, 0x30, 0x74, 0x03, - 0xcf, 0xdc, 0x6d, 0x68, 0xcd, 0x12, 0x2e, 0xa7, 0xe6, 0xc0, 0x43, 0xaf, 0xe0, 0x79, 0x42, 0x7f, - 0x5b, 0xd2, 0x68, 0x46, 0xdd, 0xc0, 0x73, 0x7f, 0x0f, 0xc4, 0x3c, 0x88, 0xdc, 0xd4, 0x69, 0xee, - 0x49, 0xe2, 0x71, 0xee, 0x1e, 0x78, 0xef, 0xa5, 0xb3, 0x47, 0xc2, 0x10, 0x7d, 0x0b, 0x25, 0xb1, - 0x8e, 0xa9, 0x59, 0x6a, 0x68, 0xcd, 0xc3, 0xf6, 0x97, 0xf6, 0xd6, 0xeb, 0xed, 0xe2, 0xc5, 0x6d, - 0x67, 0x45, 0x23, 0x31, 0x5d, 0xc7, 0x14, 0xcb, 0x30, 0xf4, 0x1d, 0x94, 0x43, 0xe6, 0xfb, 0x94, - 0x9b, 0xfb, 0x52, 0xe0, 0x8b, 0xff, 0x12, 0x18, 0x4a, 0x36, 0xce, 0xa2, 0xd0, 0x1b, 0xa8, 0xcf, - 0xc2, 0x80, 0x46, 0xc2, 
0x9d, 0x53, 0xe2, 0x51, 0x6e, 0x96, 0x65, 0x31, 0xce, 0x1e, 0x91, 0xe9, - 0x49, 0xde, 0xb5, 0xa4, 0x5d, 0xef, 0xe0, 0xda, 0xac, 0x60, 0xa7, 0x3a, 0x09, 0xe5, 0x2b, 0xca, - 0x73, 0x1d, 0xfd, 0x49, 0x9d, 0x89, 0xe4, 0xdd, 0xeb, 0x24, 0x05, 0x1b, 0x5d, 0x82, 0xbe, 0xa0, - 0x49, 0x42, 0x7c, 0x6a, 0x1e, 0xe4, 0x9f, 0x65, 0x4b, 0xe1, 0xad, 0x62, 0x5c, 0xef, 0xe0, 0x9c, - 0x9c, 0xc6, 0x09, 0x4e, 0x82, 0x90, 0x72, 0xb3, 0xf2, 0x64, 0xdc, 0x54, 0x31, 0xd2, 0xb8, 0x8c, - 0x8c, 0xbe, 0x82, 0xa3, 0x98, 0xac, 0x43, 0x46, 0x3c, 0x57, 0xf0, 0x65, 0x34, 0x23, 0x82, 0x7a, - 0x26, 0x34, 0xb4, 0xe6, 0x01, 0x36, 0x32, 0xc7, 0x34, 0xc7, 0x91, 0x0d, 0xa5, 0x98, 0x52, 0x6e, - 0x56, 0x9f, 0xcc, 0xd0, 0xf1, 0x3c, 0x4e, 0x93, 0x04, 0x4b, 0x9e, 0xf5, 0x97, 0x06, 0x95, 0xcd, - 0x07, 0x43, 0xcf, 0x00, 0x39, 0xb7, 0xce, 0x68, 0xea, 0x4e, 0x7f, 0x1c, 0x3b, 0xee, 0xbb, 0xd1, - 0xf7, 0xa3, 0x9b, 0xf7, 0x23, 0x63, 0x07, 0x9d, 0x82, 0x59, 0xc0, 0x7b, 0xc3, 0x41, 0x7a, 0xbe, - 0x76, 0x3a, 0x57, 0x0e, 0x36, 0xb4, 0x07, 0xde, 0x89, 0x83, 0x6f, 0x1d, 0x9c, 0x7b, 0x77, 0xd1, - 0x67, 0xf0, 0x62, 0x3b, 0xf6, 0xad, 0x33, 0x99, 0x74, 0xfa, 0x8e, 0xb1, 0xf7, 0xc0, 0x9d, 0x05, - 0xe7, 0xee, 0x12, 0x6a, 0xc0, 0xe9, 0x23, 0x99, 0x3b, 0xc3, 0x37, 0x6e, 0x6f, 0x78, 0x33, 0x71, - 0x8c, 0xfd, 0xc7, 0x05, 0xa6, 0xb8, 0x33, 0x18, 0x3a, 0xd8, 0x28, 0xa3, 0x4f, 0xe0, 0xa8, 0x28, - 0xd0, 0x19, 0xf5, 0x9c, 0xa1, 0xa1, 0x5b, 0x5d, 0x28, 0xab, 0x36, 0x43, 0x08, 0x0e, 0x87, 0x37, - 0xfd, 0xbe, 0x83, 0x0b, 0xef, 0x3d, 0x82, 0x7a, 0x86, 0xa9, 0x8c, 0x86, 0x56, 0x80, 0x54, 0x0a, - 0x63, 0xb7, 0x5b, 0x01, 0x3d, 0xab, 0xbf, 0xf5, 0x41, 0x83, 0x5a, 0xb1, 0xf9, 0xd0, 0x6b, 0x38, - 0x58, 0x50, 0x41, 0x3c, 0x22, 0x48, 0x36, 0xbc, 0x9f, 0x3e, 0xda, 0x25, 0x8a, 0x82, 0x37, 0x64, - 0x74, 0x06, 0xd5, 0x05, 0x15, 0x73, 0xe6, 0xb9, 0x11, 0x59, 0x50, 0x39, 0xc0, 0x15, 0x0c, 0x0a, - 0x1a, 0x91, 0x05, 0x45, 0xa7, 0x50, 0x21, 0x4b, 0x31, 0x67, 0x3c, 0x10, 0x6b, 0x39, 0xb6, 0x15, - 0x7c, 0x0f, 0xa0, 0x0b, 0xd0, 0xd3, 0x45, 0xc0, 0x96, 0x42, 0x8e, 0x6b, 0xb5, 0xfd, 0x62, 0x6b, - 0x67, 0x5c, 0x65, 0x9b, 0x09, 0xe7, 0x4c, 0xab, 0x0f, 0xb5, 0x62, 0xc7, 0xff, 0xef, 0xcb, 0x5b, - 0x7f, 0x68, 0xa0, 0x67, 0x1d, 0xfc, 0x51, 0x15, 0x48, 0x04, 0x11, 0xcb, 0xc4, 0x9d, 0x31, 0x4f, - 0x55, 0xa0, 0x8e, 0x41, 0x41, 0x3d, 0xe6, 0x51, 0xf4, 0x39, 0x1c, 0x66, 0x84, 0x7c, 0x0e, 0x55, - 0x19, 0xea, 0x0a, 0xcd, 0x46, 0xaf, 0x40, 0xf3, 0xa8, 0x20, 0x41, 0x98, 0xc8, 0x8a, 0xd4, 0x72, - 0xda, 0x95, 0x02, 0xad, 0x57, 0xa0, 0xe7, 0x11, 0xcf, 0xa0, 0x1c, 0xd2, 0xc8, 0x17, 0x73, 0x79, - 0xe1, 0x3a, 0xce, 0x2c, 0x84, 0xa0, 0x24, 0x9f, 0xb1, 0x2b, 0xe3, 0xe5, 0xd9, 0xea, 0xc2, 0x41, - 0x7e, 0x77, 0x74, 0x09, 0xfb, 0x34, 0xdd, 0x5c, 0xa6, 0xd6, 0xd8, 0x6b, 0x56, 0xdb, 0x8d, 0x7f, - 0x79, 0xa7, 0xdc, 0x70, 0x58, 0xd1, 0xad, 0xd7, 0x50, 0xff, 0x07, 0x8e, 0x0c, 0xd8, 0xfb, 0x95, - 0xae, 0x65, 0xf6, 0x0a, 0x4e, 0x8f, 0xe8, 0x18, 0xf6, 0x57, 0x24, 0x5c, 0xd2, 0x2c, 0xb7, 0x32, - 0xac, 0x3f, 0x35, 0xd0, 0xb3, 0x39, 0x46, 0x17, 0xd9, 0x76, 0xd6, 0xe4, 0x72, 0x3d, 0x7b, 0x7a, - 0xe2, 0xed, 0xc2, 0x4e, 0x36, 0x41, 0x27, 0x0a, 0xcd, 0x3a, 0x2c, 0x37, 0xd3, 0x9f, 0x47, 0x10, - 0xbb, 0x31, 0xe3, 0x42, 0x56, 0xb5, 0x8e, 0xcb, 0x41, 0x3c, 0x66, 0x5c, 0x58, 0x0e, 0x94, 0xe4, - 0x8e, 0x30, 0xa0, 0xf6, 0x60, 0x3b, 0xd4, 0xa1, 0x22, 0x91, 0xc1, 0xf8, 0xf6, 0x6b, 0x43, 0x2b, - 0x9a, 0x97, 0xc6, 0xee, 0xc6, 0x7c, 0x37, 0x1a, 0xfc, 0x60, 0xec, 0x75, 0x7f, 0x86, 0xe3, 0x80, - 0x6d, 0x5f, 0xb2, 0x7b, 0xd8, 0x95, 0xd6, 0x90, 0xf9, 0xe3, 0xb4, 0x51, 0xc7, 0xda, 0x4f, 0xed, - 0xac, 0x71, 0x7d, 0x16, 0x92, 0xc8, 0xb7, 0x19, 
0xf7, 0x5b, 0xf9, 0x7f, 0x59, 0x85, 0x49, 0xd3, - 0xdd, 0x98, 0xee, 0xea, 0xfc, 0xae, 0x2c, 0xbb, 0xfc, 0xe2, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, - 0x10, 0x93, 0x68, 0x41, 0xc2, 0x07, 0x00, 0x00, +var File_grpc_binlog_v1_binarylog_proto protoreflect.FileDescriptor + +var file_grpc_binlog_v1_binarylog_proto_rawDesc = []byte{ + 0x0a, 0x1e, 0x67, 0x72, 0x70, 0x63, 0x2f, 0x62, 0x69, 0x6e, 0x6c, 0x6f, 0x67, 0x2f, 0x76, 0x31, + 0x2f, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x11, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, + 0x2e, 0x76, 0x31, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xbb, 0x07, 0x0a, 0x0c, 0x47, 0x72, 0x70, 0x63, 0x4c, 0x6f, 0x67, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, + 0x17, 0x0a, 0x07, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, + 0x52, 0x06, 0x63, 0x61, 0x6c, 0x6c, 0x49, 0x64, 0x12, 0x35, 0x0a, 0x17, 0x73, 0x65, 0x71, 0x75, + 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x5f, 0x77, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x5f, 0x63, + 0x61, 0x6c, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x14, 0x73, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 0x65, 0x49, 0x64, 0x57, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x43, 0x61, 0x6c, 0x6c, 0x12, + 0x3d, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x29, 0x2e, + 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, 0x76, + 0x31, 0x2e, 0x47, 0x72, 0x70, 0x63, 0x4c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x2e, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x3e, + 0x0a, 0x06, 0x6c, 0x6f, 0x67, 0x67, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, + 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, + 0x76, 0x31, 0x2e, 0x47, 0x72, 0x70, 0x63, 0x4c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x2e, + 0x4c, 0x6f, 0x67, 0x67, 0x65, 0x72, 0x52, 0x06, 0x6c, 0x6f, 0x67, 0x67, 0x65, 0x72, 0x12, 0x46, + 0x0a, 0x0d, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, + 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, + 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, 0x00, 0x52, 0x0c, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, + 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x46, 0x0a, 0x0d, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x5f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, + 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, 0x76, + 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, 
0x00, + 0x52, 0x0c, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x36, + 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, + 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x48, 0x00, 0x52, 0x07, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x36, 0x0a, 0x07, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x65, + 0x72, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, + 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x69, + 0x6c, 0x65, 0x72, 0x48, 0x00, 0x52, 0x07, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x65, 0x72, 0x12, 0x2b, + 0x0a, 0x11, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x74, 0x72, 0x75, 0x6e, 0x63, 0x61, + 0x74, 0x65, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x70, 0x61, 0x79, 0x6c, 0x6f, + 0x61, 0x64, 0x54, 0x72, 0x75, 0x6e, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x2e, 0x0a, 0x04, 0x70, + 0x65, 0x65, 0x72, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x72, 0x70, 0x63, + 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x64, + 0x64, 0x72, 0x65, 0x73, 0x73, 0x52, 0x04, 0x70, 0x65, 0x65, 0x72, 0x22, 0xf5, 0x01, 0x0a, 0x09, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x12, 0x45, 0x56, 0x45, + 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, + 0x00, 0x12, 0x1c, 0x0a, 0x18, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, + 0x43, 0x4c, 0x49, 0x45, 0x4e, 0x54, 0x5f, 0x48, 0x45, 0x41, 0x44, 0x45, 0x52, 0x10, 0x01, 0x12, + 0x1c, 0x0a, 0x18, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x45, + 0x52, 0x56, 0x45, 0x52, 0x5f, 0x48, 0x45, 0x41, 0x44, 0x45, 0x52, 0x10, 0x02, 0x12, 0x1d, 0x0a, + 0x19, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4c, 0x49, 0x45, + 0x4e, 0x54, 0x5f, 0x4d, 0x45, 0x53, 0x53, 0x41, 0x47, 0x45, 0x10, 0x03, 0x12, 0x1d, 0x0a, 0x19, + 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x45, + 0x52, 0x5f, 0x4d, 0x45, 0x53, 0x53, 0x41, 0x47, 0x45, 0x10, 0x04, 0x12, 0x20, 0x0a, 0x1c, 0x45, + 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4c, 0x49, 0x45, 0x4e, 0x54, + 0x5f, 0x48, 0x41, 0x4c, 0x46, 0x5f, 0x43, 0x4c, 0x4f, 0x53, 0x45, 0x10, 0x05, 0x12, 0x1d, 0x0a, + 0x19, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x45, 0x52, 0x56, + 0x45, 0x52, 0x5f, 0x54, 0x52, 0x41, 0x49, 0x4c, 0x45, 0x52, 0x10, 0x06, 0x12, 0x15, 0x0a, 0x11, + 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x41, 0x4e, 0x43, 0x45, + 0x4c, 0x10, 0x07, 0x22, 0x42, 0x0a, 0x06, 0x4c, 0x6f, 0x67, 0x67, 0x65, 0x72, 0x12, 0x12, 0x0a, + 0x0e, 0x4c, 0x4f, 0x47, 0x47, 0x45, 0x52, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, + 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x4c, 0x4f, 0x47, 0x47, 0x45, 0x52, 0x5f, 0x43, 0x4c, 0x49, 0x45, + 0x4e, 0x54, 0x10, 0x01, 0x12, 0x11, 0x0a, 0x0d, 0x4c, 0x4f, 0x47, 0x47, 0x45, 0x52, 0x5f, 0x53, + 0x45, 0x52, 0x56, 0x45, 0x52, 0x10, 0x02, 0x42, 0x09, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, + 0x61, 0x64, 0x22, 0xbb, 0x01, 0x0a, 0x0c, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x48, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x01, 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, + 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x0a, 0x0b, + 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, + 0x09, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, 0x33, 0x0a, 0x07, 0x74, + 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, + 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, + 0x22, 0x47, 0x0a, 0x0c, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, + 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, + 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xb1, 0x01, 0x0a, 0x07, 0x54, 0x72, + 0x61, 0x69, 0x6c, 0x65, 0x72, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, + 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1f, + 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x12, + 0x25, 0x0a, 0x0e, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, + 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x35, 0x0a, + 0x07, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x42, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x12, 0x36, 0x0a, 0x05, 0x65, 0x6e, 0x74, 0x72, 0x79, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x20, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, + 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x05, 0x65, 0x6e, 0x74, 0x72, 0x79, 0x22, 0x37, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 
0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x22, 0xb8, 0x01, 0x0a, 0x07, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, 0x33, 0x0a, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1f, 0x2e, 0x67, 0x72, + 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2e, 0x76, 0x31, 0x2e, + 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, + 0x70, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, 0x17, 0x0a, 0x07, + 0x69, 0x70, 0x5f, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x69, + 0x70, 0x50, 0x6f, 0x72, 0x74, 0x22, 0x45, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, + 0x0c, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, + 0x0d, 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x50, 0x56, 0x34, 0x10, 0x01, 0x12, 0x0d, + 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x50, 0x56, 0x36, 0x10, 0x02, 0x12, 0x0d, 0x0a, + 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x49, 0x58, 0x10, 0x03, 0x42, 0x5c, 0x0a, 0x14, + 0x69, 0x6f, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, + 0x67, 0x2e, 0x76, 0x31, 0x42, 0x0e, 0x42, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x4c, 0x6f, 0x67, 0x50, + 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x32, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, + 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x2f, 0x62, + 0x69, 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x5f, 0x62, 0x69, + 0x6e, 0x61, 0x72, 0x79, 0x6c, 0x6f, 0x67, 0x5f, 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, +} + +var ( + file_grpc_binlog_v1_binarylog_proto_rawDescOnce sync.Once + file_grpc_binlog_v1_binarylog_proto_rawDescData = file_grpc_binlog_v1_binarylog_proto_rawDesc +) + +func file_grpc_binlog_v1_binarylog_proto_rawDescGZIP() []byte { + file_grpc_binlog_v1_binarylog_proto_rawDescOnce.Do(func() { + file_grpc_binlog_v1_binarylog_proto_rawDescData = protoimpl.X.CompressGZIP(file_grpc_binlog_v1_binarylog_proto_rawDescData) + }) + return file_grpc_binlog_v1_binarylog_proto_rawDescData +} + +var file_grpc_binlog_v1_binarylog_proto_enumTypes = make([]protoimpl.EnumInfo, 3) +var file_grpc_binlog_v1_binarylog_proto_msgTypes = make([]protoimpl.MessageInfo, 8) +var file_grpc_binlog_v1_binarylog_proto_goTypes = []interface{}{ + (GrpcLogEntry_EventType)(0), // 0: grpc.binarylog.v1.GrpcLogEntry.EventType + (GrpcLogEntry_Logger)(0), // 1: grpc.binarylog.v1.GrpcLogEntry.Logger + (Address_Type)(0), // 2: grpc.binarylog.v1.Address.Type + (*GrpcLogEntry)(nil), // 3: grpc.binarylog.v1.GrpcLogEntry + (*ClientHeader)(nil), // 4: grpc.binarylog.v1.ClientHeader + (*ServerHeader)(nil), // 5: grpc.binarylog.v1.ServerHeader + (*Trailer)(nil), // 6: grpc.binarylog.v1.Trailer + (*Message)(nil), // 7: grpc.binarylog.v1.Message + (*Metadata)(nil), // 8: grpc.binarylog.v1.Metadata + (*MetadataEntry)(nil), // 9: grpc.binarylog.v1.MetadataEntry + (*Address)(nil), // 10: grpc.binarylog.v1.Address + (*timestamppb.Timestamp)(nil), // 11: google.protobuf.Timestamp + (*durationpb.Duration)(nil), // 12: google.protobuf.Duration +} +var file_grpc_binlog_v1_binarylog_proto_depIdxs = []int32{ + 11, // 0: grpc.binarylog.v1.GrpcLogEntry.timestamp:type_name -> google.protobuf.Timestamp + 0, // 1: 
grpc.binarylog.v1.GrpcLogEntry.type:type_name -> grpc.binarylog.v1.GrpcLogEntry.EventType + 1, // 2: grpc.binarylog.v1.GrpcLogEntry.logger:type_name -> grpc.binarylog.v1.GrpcLogEntry.Logger + 4, // 3: grpc.binarylog.v1.GrpcLogEntry.client_header:type_name -> grpc.binarylog.v1.ClientHeader + 5, // 4: grpc.binarylog.v1.GrpcLogEntry.server_header:type_name -> grpc.binarylog.v1.ServerHeader + 7, // 5: grpc.binarylog.v1.GrpcLogEntry.message:type_name -> grpc.binarylog.v1.Message + 6, // 6: grpc.binarylog.v1.GrpcLogEntry.trailer:type_name -> grpc.binarylog.v1.Trailer + 10, // 7: grpc.binarylog.v1.GrpcLogEntry.peer:type_name -> grpc.binarylog.v1.Address + 8, // 8: grpc.binarylog.v1.ClientHeader.metadata:type_name -> grpc.binarylog.v1.Metadata + 12, // 9: grpc.binarylog.v1.ClientHeader.timeout:type_name -> google.protobuf.Duration + 8, // 10: grpc.binarylog.v1.ServerHeader.metadata:type_name -> grpc.binarylog.v1.Metadata + 8, // 11: grpc.binarylog.v1.Trailer.metadata:type_name -> grpc.binarylog.v1.Metadata + 9, // 12: grpc.binarylog.v1.Metadata.entry:type_name -> grpc.binarylog.v1.MetadataEntry + 2, // 13: grpc.binarylog.v1.Address.type:type_name -> grpc.binarylog.v1.Address.Type + 14, // [14:14] is the sub-list for method output_type + 14, // [14:14] is the sub-list for method input_type + 14, // [14:14] is the sub-list for extension type_name + 14, // [14:14] is the sub-list for extension extendee + 0, // [0:14] is the sub-list for field type_name +} + +func init() { file_grpc_binlog_v1_binarylog_proto_init() } +func file_grpc_binlog_v1_binarylog_proto_init() { + if File_grpc_binlog_v1_binarylog_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_grpc_binlog_v1_binarylog_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GrpcLogEntry); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_grpc_binlog_v1_binarylog_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ClientHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_grpc_binlog_v1_binarylog_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServerHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_grpc_binlog_v1_binarylog_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Trailer); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_grpc_binlog_v1_binarylog_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Message); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_grpc_binlog_v1_binarylog_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Metadata); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_grpc_binlog_v1_binarylog_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MetadataEntry); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_grpc_binlog_v1_binarylog_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Address); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_grpc_binlog_v1_binarylog_proto_msgTypes[0].OneofWrappers = []interface{}{ + (*GrpcLogEntry_ClientHeader)(nil), + (*GrpcLogEntry_ServerHeader)(nil), + (*GrpcLogEntry_Message)(nil), + (*GrpcLogEntry_Trailer)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_grpc_binlog_v1_binarylog_proto_rawDesc, + NumEnums: 3, + NumMessages: 8, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_grpc_binlog_v1_binarylog_proto_goTypes, + DependencyIndexes: file_grpc_binlog_v1_binarylog_proto_depIdxs, + EnumInfos: file_grpc_binlog_v1_binarylog_proto_enumTypes, + MessageInfos: file_grpc_binlog_v1_binarylog_proto_msgTypes, + }.Build() + File_grpc_binlog_v1_binarylog_proto = out.File + file_grpc_binlog_v1_binarylog_proto_rawDesc = nil + file_grpc_binlog_v1_binarylog_proto_goTypes = nil + file_grpc_binlog_v1_binarylog_proto_depIdxs = nil } diff --git a/vendor/google.golang.org/grpc/clientconn.go b/vendor/google.golang.org/grpc/clientconn.go index ae5ce4947e2..77a08fd33bf 100644 --- a/vendor/google.golang.org/grpc/clientconn.go +++ b/vendor/google.golang.org/grpc/clientconn.go @@ -23,7 +23,6 @@ import ( "errors" "fmt" "math" - "net" "reflect" "strings" "sync" @@ -39,6 +38,7 @@ import ( "google.golang.org/grpc/internal/channelz" "google.golang.org/grpc/internal/grpcsync" "google.golang.org/grpc/internal/grpcutil" + iresolver "google.golang.org/grpc/internal/resolver" "google.golang.org/grpc/internal/transport" "google.golang.org/grpc/keepalive" "google.golang.org/grpc/resolver" @@ -48,6 +48,7 @@ import ( _ "google.golang.org/grpc/balancer/roundrobin" // To register roundrobin. _ "google.golang.org/grpc/internal/resolver/dns" // To register dns resolver. _ "google.golang.org/grpc/internal/resolver/passthrough" // To register passthrough resolver. + _ "google.golang.org/grpc/internal/resolver/unix" // To register unix resolver. ) const ( @@ -104,6 +105,17 @@ func Dial(target string, opts ...DialOption) (*ClientConn, error) { return DialContext(context.Background(), target, opts...) } +type defaultConfigSelector struct { + sc *ServiceConfig +} + +func (dcs *defaultConfigSelector) SelectConfig(rpcInfo iresolver.RPCInfo) (*iresolver.RPCConfig, error) { + return &iresolver.RPCConfig{ + Context: rpcInfo.Context, + MethodConfig: getMethodConfig(dcs.sc, rpcInfo.Method), + }, nil +} + // DialContext creates a client connection to the given target. By default, it's // a non-blocking dial (the function won't wait for connections to be // established, and connecting happens in the background). 
To make it a blocking @@ -151,10 +163,10 @@ func DialContext(ctx context.Context, target string, opts ...DialOption) (conn * cc.channelzID = channelz.RegisterChannel(&channelzChannel{cc}, cc.dopts.channelzParentID, target) channelz.AddTraceEvent(logger, cc.channelzID, 0, &channelz.TraceEventDesc{ Desc: "Channel Created", - Severity: channelz.CtINFO, + Severity: channelz.CtInfo, Parent: &channelz.TraceEventDesc{ Desc: fmt.Sprintf("Nested Channel(id:%d) created", cc.channelzID), - Severity: channelz.CtINFO, + Severity: channelz.CtInfo, }, }) } else { @@ -191,16 +203,6 @@ func DialContext(ctx context.Context, target string, opts ...DialOption) (conn * } cc.mkp = cc.dopts.copts.KeepaliveParams - if cc.dopts.copts.Dialer == nil { - cc.dopts.copts.Dialer = func(ctx context.Context, addr string) (net.Conn, error) { - network, addr := parseDialTarget(addr) - return (&net.Dialer{}).DialContext(ctx, network, addr) - } - if cc.dopts.withProxy { - cc.dopts.copts.Dialer = newProxyDialer(cc.dopts.copts.Dialer) - } - } - if cc.dopts.copts.UserAgent != "" { cc.dopts.copts.UserAgent += " " + grpcUA } else { @@ -234,6 +236,7 @@ func DialContext(ctx context.Context, target string, opts ...DialOption) (conn * case sc, ok := <-cc.dopts.scChan: if ok { cc.sc = &sc + cc.safeConfigSelector.UpdateConfigSelector(&defaultConfigSelector{&sc}) scSet = true } default: @@ -244,8 +247,7 @@ func DialContext(ctx context.Context, target string, opts ...DialOption) (conn * } // Determine the resolver to use. - cc.parsedTarget = grpcutil.ParseTarget(cc.target) - unixScheme := strings.HasPrefix(cc.target, "unix:") + cc.parsedTarget = grpcutil.ParseTarget(cc.target, cc.dopts.copts.Dialer != nil) channelz.Infof(logger, cc.channelzID, "parsed scheme: %q", cc.parsedTarget.Scheme) resolverBuilder := cc.getResolver(cc.parsedTarget.Scheme) if resolverBuilder == nil { @@ -268,8 +270,10 @@ func DialContext(ctx context.Context, target string, opts ...DialOption) (conn * cc.authority = creds.Info().ServerName } else if cc.dopts.insecure && cc.dopts.authority != "" { cc.authority = cc.dopts.authority - } else if unixScheme { + } else if strings.HasPrefix(cc.target, "unix:") || strings.HasPrefix(cc.target, "unix-abstract:") { cc.authority = "localhost" + } else if strings.HasPrefix(cc.parsedTarget.Endpoint, ":") { + cc.authority = "localhost" + cc.parsedTarget.Endpoint } else { // Use endpoint from "scheme://authority/endpoint" as the default // authority for ClientConn. @@ -282,6 +286,7 @@ func DialContext(ctx context.Context, target string, opts ...DialOption) (conn * case sc, ok := <-cc.dopts.scChan: if ok { cc.sc = &sc + cc.safeConfigSelector.UpdateConfigSelector(&defaultConfigSelector{&sc}) } case <-ctx.Done(): return nil, ctx.Err() @@ -299,6 +304,7 @@ func DialContext(ctx context.Context, target string, opts ...DialOption) (conn * DialCreds: credsClone, CredsBundle: cc.dopts.copts.CredsBundle, Dialer: cc.dopts.copts.Dialer, + CustomUserAgent: cc.dopts.copts.UserAgent, ChannelzParentID: cc.channelzID, Target: cc.parsedTarget, } @@ -487,6 +493,8 @@ type ClientConn struct { balancerBuildOpts balancer.BuildOptions blockingpicker *pickerWrapper + safeConfigSelector iresolver.SafeConfigSelector + mu sync.RWMutex resolverWrapper *ccResolverWrapper sc *ServiceConfig @@ -508,7 +516,11 @@ type ClientConn struct { // WaitForStateChange waits until the connectivity.State of ClientConn changes from sourceState or // ctx expires. A true value is returned in former case and false in latter. -// This is an EXPERIMENTAL API. 
+// +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func (cc *ClientConn) WaitForStateChange(ctx context.Context, sourceState connectivity.State) bool { ch := cc.csMgr.getNotifyChan() if cc.csMgr.getState() != sourceState { @@ -523,7 +535,11 @@ func (cc *ClientConn) WaitForStateChange(ctx context.Context, sourceState connec } // GetState returns the connectivity.State of ClientConn. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func (cc *ClientConn) GetState() connectivity.State { return cc.csMgr.getState() } @@ -539,6 +555,7 @@ func (cc *ClientConn) scWatcher() { // TODO: load balance policy runtime change is ignored. // We may revisit this decision in the future. cc.sc = &sc + cc.safeConfigSelector.UpdateConfigSelector(&defaultConfigSelector{&sc}) cc.mu.Unlock() case <-cc.ctx.Done(): return @@ -577,13 +594,13 @@ func init() { func (cc *ClientConn) maybeApplyDefaultServiceConfig(addrs []resolver.Address) { if cc.sc != nil { - cc.applyServiceConfigAndBalancer(cc.sc, addrs) + cc.applyServiceConfigAndBalancer(cc.sc, nil, addrs) return } if cc.dopts.defaultServiceConfig != nil { - cc.applyServiceConfigAndBalancer(cc.dopts.defaultServiceConfig, addrs) + cc.applyServiceConfigAndBalancer(cc.dopts.defaultServiceConfig, &defaultConfigSelector{cc.dopts.defaultServiceConfig}, addrs) } else { - cc.applyServiceConfigAndBalancer(emptyServiceConfig, addrs) + cc.applyServiceConfigAndBalancer(emptyServiceConfig, &defaultConfigSelector{emptyServiceConfig}, addrs) } } @@ -620,7 +637,15 @@ func (cc *ClientConn) updateResolverState(s resolver.State, err error) error { // default, per the error handling design? } else { if sc, ok := s.ServiceConfig.Config.(*ServiceConfig); s.ServiceConfig.Err == nil && ok { - cc.applyServiceConfigAndBalancer(sc, s.Addresses) + configSelector := iresolver.GetConfigSelector(s) + if configSelector != nil { + if len(s.ServiceConfig.Config.(*ServiceConfig).Methods) != 0 { + channelz.Infof(logger, cc.channelzID, "method configs in service config will be ignored due to presence of config selector") + } + } else { + configSelector = &defaultConfigSelector{sc} + } + cc.applyServiceConfigAndBalancer(sc, configSelector, s.Addresses) } else { ret = balancer.ErrBadResolverState if cc.balancerWrapper == nil { @@ -630,6 +655,7 @@ func (cc *ClientConn) updateResolverState(s resolver.State, err error) error { } else { err = status.Errorf(codes.Unavailable, "illegal service config type: %T", s.ServiceConfig.Config) } + cc.safeConfigSelector.UpdateConfigSelector(&defaultConfigSelector{cc.sc}) cc.blockingpicker.updatePicker(base.NewErrPicker(err)) cc.csMgr.updateState(connectivity.TransientFailure) cc.mu.Unlock() @@ -736,10 +762,10 @@ func (cc *ClientConn) newAddrConn(addrs []resolver.Address, opts balancer.NewSub ac.channelzID = channelz.RegisterSubChannel(ac, cc.channelzID, "") channelz.AddTraceEvent(logger, ac.channelzID, 0, &channelz.TraceEventDesc{ Desc: "Subchannel Created", - Severity: channelz.CtINFO, + Severity: channelz.CtInfo, Parent: &channelz.TraceEventDesc{ Desc: fmt.Sprintf("Subchannel(id:%d) created", ac.channelzID), - Severity: channelz.CtINFO, + Severity: channelz.CtInfo, }, }) } @@ -773,7 +799,11 @@ func (cc *ClientConn) channelzMetric() *channelz.ChannelInternalMetric { } // Target returns the target string of the ClientConn. -// This is an EXPERIMENTAL API. 
+// +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func (cc *ClientConn) Target() string { return cc.target } @@ -860,6 +890,20 @@ func (ac *addrConn) tryUpdateAddrs(addrs []resolver.Address) bool { return curAddrFound } +func getMethodConfig(sc *ServiceConfig, method string) MethodConfig { + if sc == nil { + return MethodConfig{} + } + if m, ok := sc.Methods[method]; ok { + return m + } + i := strings.LastIndex(method, "/") + if m, ok := sc.Methods[method[:i+1]]; ok { + return m + } + return sc.Methods[""] +} + // GetMethodConfig gets the method config of the input method. // If there's an exact match for input method (i.e. /service/method), we return // the corresponding MethodConfig. @@ -872,17 +916,7 @@ func (cc *ClientConn) GetMethodConfig(method string) MethodConfig { // TODO: Avoid the locking here. cc.mu.RLock() defer cc.mu.RUnlock() - if cc.sc == nil { - return MethodConfig{} - } - if m, ok := cc.sc.Methods[method]; ok { - return m - } - i := strings.LastIndex(method, "/") - if m, ok := cc.sc.Methods[method[:i+1]]; ok { - return m - } - return cc.sc.Methods[""] + return getMethodConfig(cc.sc, method) } func (cc *ClientConn) healthCheckConfig() *healthCheckConfig { @@ -905,12 +939,15 @@ func (cc *ClientConn) getTransport(ctx context.Context, failfast bool, method st return t, done, nil } -func (cc *ClientConn) applyServiceConfigAndBalancer(sc *ServiceConfig, addrs []resolver.Address) { +func (cc *ClientConn) applyServiceConfigAndBalancer(sc *ServiceConfig, configSelector iresolver.ConfigSelector, addrs []resolver.Address) { if sc == nil { // should never reach here. return } cc.sc = sc + if configSelector != nil { + cc.safeConfigSelector.UpdateConfigSelector(configSelector) + } if cc.sc.retryThrottling != nil { newThrottler := &retryThrottler{ @@ -974,7 +1011,10 @@ func (cc *ClientConn) resolveNow(o resolver.ResolveNowOptions) { // However, if a previously unavailable network becomes available, this may be // used to trigger an immediate reconnect. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. 
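The getMethodConfig helper introduced in the hunk above is what ClientConn.GetMethodConfig and the new defaultConfigSelector now share. Its fallback order — exact "/service/method" first, then the "/service/" default, then the global "" entry — is easiest to see in a standalone sketch that mirrors the same logic over a plain map (the service and method names below are invented, and strings stand in for real MethodConfig values):

package main

import (
	"fmt"
	"strings"
)

// lookup mirrors the fallback order used by getMethodConfig in clientconn.go:
// exact "/service/method" match, then the "/service/" default, then "".
func lookup(methods map[string]string, method string) string {
	if mc, ok := methods[method]; ok {
		return mc
	}
	i := strings.LastIndex(method, "/")
	if mc, ok := methods[method[:i+1]]; ok {
		return mc
	}
	return methods[""]
}

func main() {
	methods := map[string]string{
		"/echo.Echo/Ping": "per-method config",
		"/echo.Echo/":     "per-service default",
		"":                "global default",
	}
	fmt.Println(lookup(methods, "/echo.Echo/Ping")) // per-method config
	fmt.Println(lookup(methods, "/echo.Echo/Pong")) // per-service default
	fmt.Println(lookup(methods, "/other.Svc/Do"))   // global default
}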
func (cc *ClientConn) ResetConnectBackoff() { cc.mu.Lock() conns := cc.conns @@ -1018,12 +1058,12 @@ func (cc *ClientConn) Close() error { if channelz.IsOn() { ted := &channelz.TraceEventDesc{ Desc: "Channel Deleted", - Severity: channelz.CtINFO, + Severity: channelz.CtInfo, } if cc.dopts.channelzParentID != 0 { ted.Parent = &channelz.TraceEventDesc{ Desc: fmt.Sprintf("Nested channel(id:%d) deleted", cc.channelzID), - Severity: channelz.CtINFO, + Severity: channelz.CtInfo, } } channelz.AddTraceEvent(logger, cc.channelzID, 0, ted) @@ -1436,10 +1476,10 @@ func (ac *addrConn) tearDown(err error) { if channelz.IsOn() { channelz.AddTraceEvent(logger, ac.channelzID, 0, &channelz.TraceEventDesc{ Desc: "Subchannel Deleted", - Severity: channelz.CtINFO, + Severity: channelz.CtInfo, Parent: &channelz.TraceEventDesc{ Desc: fmt.Sprintf("Subchanel(id:%d) deleted", ac.channelzID), - Severity: channelz.CtINFO, + Severity: channelz.CtInfo, }, }) // TraceEvent needs to be called before RemoveEntry, as TraceEvent may add trace reference to diff --git a/vendor/google.golang.org/grpc/connectivity/connectivity.go b/vendor/google.golang.org/grpc/connectivity/connectivity.go index 34ec36fbf6d..01015626150 100644 --- a/vendor/google.golang.org/grpc/connectivity/connectivity.go +++ b/vendor/google.golang.org/grpc/connectivity/connectivity.go @@ -22,11 +22,11 @@ package connectivity import ( - "context" - "google.golang.org/grpc/grpclog" ) +var logger = grpclog.Component("core") + // State indicates the state of connectivity. // It can be the state of a ClientConn or SubConn. type State int @@ -44,7 +44,7 @@ func (s State) String() string { case Shutdown: return "SHUTDOWN" default: - grpclog.Errorf("unknown connectivity state: %d", s) + logger.Errorf("unknown connectivity state: %d", s) return "Invalid-State" } } @@ -61,13 +61,3 @@ const ( // Shutdown indicates the ClientConn has started shutting down. Shutdown ) - -// Reporter reports the connectivity states. -type Reporter interface { - // CurrentState returns the current state of the reporter. - CurrentState() State - // WaitForStateChange blocks until the reporter's state is different from the given state, - // and returns true. - // It returns false if <-ctx.Done() can proceed (ctx got timeout or got canceled). - WaitForStateChange(context.Context, State) bool -} diff --git a/vendor/google.golang.org/grpc/credentials/credentials.go b/vendor/google.golang.org/grpc/credentials/credentials.go index 02766443ae7..e69562e7878 100644 --- a/vendor/google.golang.org/grpc/credentials/credentials.go +++ b/vendor/google.golang.org/grpc/credentials/credentials.go @@ -58,9 +58,9 @@ type PerRPCCredentials interface { type SecurityLevel int const ( - // Invalid indicates an invalid security level. + // InvalidSecurityLevel indicates an invalid security level. // The zero SecurityLevel value is invalid for backward compatibility. - Invalid SecurityLevel = iota + InvalidSecurityLevel SecurityLevel = iota // NoSecurity indicates a connection is insecure. NoSecurity // IntegrityOnly indicates a connection only provides integrity protection. @@ -92,7 +92,7 @@ type CommonAuthInfo struct { } // GetCommonAuthInfo returns the pointer to CommonAuthInfo struct. -func (c *CommonAuthInfo) GetCommonAuthInfo() *CommonAuthInfo { +func (c CommonAuthInfo) GetCommonAuthInfo() CommonAuthInfo { return c } @@ -229,17 +229,16 @@ func ClientHandshakeInfoFromContext(ctx context.Context) ClientHandshakeInfo { // or 3) CommonAuthInfo.SecurityLevel has an invalid zero value. 
For 2) and 3), it is for the purpose of backward-compatibility. // // This API is experimental. -func CheckSecurityLevel(ctx context.Context, level SecurityLevel) error { +func CheckSecurityLevel(ai AuthInfo, level SecurityLevel) error { type internalInfo interface { - GetCommonAuthInfo() *CommonAuthInfo + GetCommonAuthInfo() CommonAuthInfo } - ri, _ := RequestInfoFromContext(ctx) - if ri.AuthInfo == nil { - return errors.New("unable to obtain SecurityLevel from context") + if ai == nil { + return errors.New("AuthInfo is nil") } - if ci, ok := ri.AuthInfo.(internalInfo); ok { + if ci, ok := ai.(internalInfo); ok { // CommonAuthInfo.SecurityLevel has an invalid value. - if ci.GetCommonAuthInfo().SecurityLevel == Invalid { + if ci.GetCommonAuthInfo().SecurityLevel == InvalidSecurityLevel { return nil } if ci.GetCommonAuthInfo().SecurityLevel < level { diff --git a/vendor/google.golang.org/grpc/credentials/tls.go b/vendor/google.golang.org/grpc/credentials/tls.go index 1ba6f3a6b8f..8ee7124f226 100644 --- a/vendor/google.golang.org/grpc/credentials/tls.go +++ b/vendor/google.golang.org/grpc/credentials/tls.go @@ -27,7 +27,6 @@ import ( "net" "net/url" - "google.golang.org/grpc/credentials/internal" credinternal "google.golang.org/grpc/internal/credentials" ) @@ -73,7 +72,7 @@ func (c tlsCreds) Info() ProtocolInfo { func (c *tlsCreds) ClientHandshake(ctx context.Context, authority string, rawConn net.Conn) (_ net.Conn, _ AuthInfo, err error) { // use local cfg to avoid clobbering ServerName if using multiple endpoints - cfg := cloneTLSConfig(c.config) + cfg := credinternal.CloneTLSConfig(c.config) if cfg.ServerName == "" { serverName, _, err := net.SplitHostPort(authority) if err != nil { @@ -108,7 +107,7 @@ func (c *tlsCreds) ClientHandshake(ctx context.Context, authority string, rawCon if id != nil { tlsInfo.SPIFFEID = id } - return internal.WrapSyscallConn(rawConn, conn), tlsInfo, nil + return credinternal.WrapSyscallConn(rawConn, conn), tlsInfo, nil } func (c *tlsCreds) ServerHandshake(rawConn net.Conn) (net.Conn, AuthInfo, error) { @@ -127,7 +126,7 @@ func (c *tlsCreds) ServerHandshake(rawConn net.Conn) (net.Conn, AuthInfo, error) if id != nil { tlsInfo.SPIFFEID = id } - return internal.WrapSyscallConn(rawConn, conn), tlsInfo, nil + return credinternal.WrapSyscallConn(rawConn, conn), tlsInfo, nil } func (c *tlsCreds) Clone() TransportCredentials { @@ -139,23 +138,10 @@ func (c *tlsCreds) OverrideServerName(serverNameOverride string) error { return nil } -const alpnProtoStrH2 = "h2" - -func appendH2ToNextProtos(ps []string) []string { - for _, p := range ps { - if p == alpnProtoStrH2 { - return ps - } - } - ret := make([]string, 0, len(ps)+1) - ret = append(ret, ps...) - return append(ret, alpnProtoStrH2) -} - // NewTLS uses c to construct a TransportCredentials based on TLS. func NewTLS(c *tls.Config) TransportCredentials { - tc := &tlsCreds{cloneTLSConfig(c)} - tc.config.NextProtos = appendH2ToNextProtos(tc.config.NextProtos) + tc := &tlsCreds{credinternal.CloneTLSConfig(c)} + tc.config.NextProtos = credinternal.AppendH2ToNextProtos(tc.config.NextProtos) return tc } @@ -209,7 +195,10 @@ func NewServerTLSFromFile(certFile, keyFile string) (TransportCredentials, error // TLSChannelzSecurityValue defines the struct that TLS protocol should return // from GetSecurityValue(), containing security info like cipher and certificate used. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. 
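CheckSecurityLevel, changed in the hunk above, now takes the connection's AuthInfo directly instead of fishing it out of the request context. A hedged sketch of a server-side guard using the new signature follows; the package and helper name are hypothetical, while peer.FromContext, CheckSecurityLevel, and the codes/status calls are the library APIs shown or referenced in this patch:

package server

import (
	"context"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/credentials"
	"google.golang.org/grpc/peer"
	"google.golang.org/grpc/status"
)

// requirePrivacy rejects calls whose transport does not provide at least
// privacy and integrity protection, using the AuthInfo attached to the peer.
func requirePrivacy(ctx context.Context) error {
	p, ok := peer.FromContext(ctx)
	if !ok || p.AuthInfo == nil {
		return status.Error(codes.Unauthenticated, "no transport security info")
	}
	if err := credentials.CheckSecurityLevel(p.AuthInfo, credentials.PrivacyAndIntegrity); err != nil {
		return status.Errorf(codes.PermissionDenied, "connection not secure enough: %v", err)
	}
	return nil
}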
type TLSChannelzSecurityValue struct { ChannelzSecurityValue StandardName string @@ -242,18 +231,3 @@ var cipherSuiteLookup = map[uint16]string{ tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305: "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305: "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", } - -// cloneTLSConfig returns a shallow clone of the exported -// fields of cfg, ignoring the unexported sync.Once, which -// contains a mutex and must not be copied. -// -// If cfg is nil, a new zero tls.Config is returned. -// -// TODO: inline this function if possible. -func cloneTLSConfig(cfg *tls.Config) *tls.Config { - if cfg == nil { - return &tls.Config{} - } - - return cfg.Clone() -} diff --git a/vendor/google.golang.org/grpc/dialoptions.go b/vendor/google.golang.org/grpc/dialoptions.go index decb4c5ee89..e7f86e6d7c8 100644 --- a/vendor/google.golang.org/grpc/dialoptions.go +++ b/vendor/google.golang.org/grpc/dialoptions.go @@ -71,7 +71,6 @@ type dialOptions struct { // we need to be able to configure this in tests. resolveNowBackoff func(int) time.Duration resolvers []resolver.Builder - withProxy bool } // DialOption configures how we set up the connection. @@ -82,7 +81,10 @@ type DialOption interface { // EmptyDialOption does not alter the dial configuration. It can be embedded in // another structure to build custom dial options. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type EmptyDialOption struct{} func (EmptyDialOption) apply(*dialOptions) {} @@ -238,7 +240,10 @@ func WithServiceConfig(c <-chan ServiceConfig) DialOption { // using the backoff.DefaultConfig as a base, in cases where you want to // override only a subset of the backoff configuration. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func WithConnectParams(p ConnectParams) DialOption { return newFuncDialOption(func(o *dialOptions) { o.bs = internalbackoff.Exponential{Config: p.Backoff} @@ -290,7 +295,10 @@ func WithBlock() DialOption { // the context.DeadlineExceeded error. // Implies WithBlock() // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func WithReturnConnectionError() DialOption { return newFuncDialOption(func(o *dialOptions) { o.block = true @@ -310,10 +318,13 @@ func WithInsecure() DialOption { // WithNoProxy returns a DialOption which disables the use of proxies for this // ClientConn. This is ignored if WithDialer or WithContextDialer are used. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func WithNoProxy() DialOption { return newFuncDialOption(func(o *dialOptions) { - o.withProxy = false + o.copts.UseProxy = false }) } @@ -338,7 +349,10 @@ func WithPerRPCCredentials(creds credentials.PerRPCCredentials) DialOption { // the ClientConn.WithCreds. This should not be used together with // WithTransportCredentials. // -// This API is experimental. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. 
func WithCredentialsBundle(b credentials.Bundle) DialOption { return newFuncDialOption(func(o *dialOptions) { o.copts.CredsBundle = b @@ -403,7 +417,10 @@ func WithStatsHandler(h stats.Handler) DialOption { // FailOnNonTempDialError only affects the initial dial, and does not do // anything useful unless you are also using WithBlock(). // -// This is an EXPERIMENTAL API. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func FailOnNonTempDialError(f bool) DialOption { return newFuncDialOption(func(o *dialOptions) { o.copts.FailOnNonTempDialError = f @@ -481,7 +498,10 @@ func WithAuthority(a string) DialOption { // current ClientConn's parent. This function is used in nested channel creation // (e.g. grpclb dial). // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func WithChannelzParentID(id int64) DialOption { return newFuncDialOption(func(o *dialOptions) { o.channelzParentID = id @@ -507,7 +527,10 @@ func WithDisableServiceConfig() DialOption { // 2. Resolver does not return a service config or if the resolver returns an // invalid service config. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func WithDefaultServiceConfig(s string) DialOption { return newFuncDialOption(func(o *dialOptions) { o.defaultServiceConfigRawJSON = &s @@ -523,7 +546,10 @@ func WithDefaultServiceConfig(s string) DialOption { // default in the future. Until then, it may be enabled by setting the // environment variable "GRPC_GO_RETRY" to "on". // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func WithDisableRetry() DialOption { return newFuncDialOption(func(o *dialOptions) { o.disableRetry = true @@ -541,7 +567,10 @@ func WithMaxHeaderListSize(s uint32) DialOption { // WithDisableHealthCheck disables the LB channel health checking for all // SubConns of this ClientConn. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func WithDisableHealthCheck() DialOption { return newFuncDialOption(func(o *dialOptions) { o.disableHealthCheck = true @@ -565,9 +594,9 @@ func defaultDialOptions() dialOptions { copts: transport.ConnectOptions{ WriteBufferSize: defaultWriteBufSize, ReadBufferSize: defaultReadBufSize, + UseProxy: true, }, resolveNowBackoff: internalbackoff.DefaultExponential.Backoff, - withProxy: true, } } @@ -597,7 +626,10 @@ func withResolveNowBackoff(f func(int) time.Duration) DialOption { // resolver.Register. They will be matched against the scheme used for the // current Dial only, and will take precedence over the global registry. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func WithResolvers(rs ...resolver.Builder) DialOption { return newFuncDialOption(func(o *dialOptions) { o.resolvers = append(o.resolvers, rs...) 
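
As a quick reference while reviewing the dialoptions.go hunks above, a minimal sketch of how the UseProxy-backed WithNoProxy() and the experimental WithConnectParams() fit together on the caller side. The target address and the use of WithInsecure() are assumptions for illustration only and are not part of this patch.

package main

import (
	"log"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/backoff"
)

func main() {
	// Skip any HTTP proxy (the option above now sets copts.UseProxy = false)
	// and override only the reconnect backoff, starting from the defaults.
	conn, err := grpc.Dial(
		"localhost:50051", // hypothetical target, for illustration only
		grpc.WithInsecure(),
		grpc.WithNoProxy(),
		grpc.WithConnectParams(grpc.ConnectParams{
			Backoff:           backoff.DefaultConfig,
			MinConnectTimeout: 5 * time.Second,
		}),
	)
	if err != nil {
		log.Fatalf("grpc.Dial: %v", err)
	}
	defer conn.Close()
}
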
diff --git a/vendor/google.golang.org/grpc/encoding/encoding.go b/vendor/google.golang.org/grpc/encoding/encoding.go index 195e8448b64..6d84f74c7d0 100644 --- a/vendor/google.golang.org/grpc/encoding/encoding.go +++ b/vendor/google.golang.org/grpc/encoding/encoding.go @@ -19,7 +19,10 @@ // Package encoding defines the interface for the compressor and codec, and // functions to register and retrieve compressors and codecs. // -// This package is EXPERIMENTAL. +// Experimental +// +// Notice: This package is EXPERIMENTAL and may be changed or removed in a +// later release. package encoding import ( @@ -46,10 +49,15 @@ type Compressor interface { // coding header. The result must be static; the result cannot change // between calls. Name() string - // EXPERIMENTAL: if a Compressor implements + // If a Compressor implements // DecompressedSize(compressedBytes []byte) int, gRPC will call it // to determine the size of the buffer allocated for the result of decompression. // Return -1 to indicate unknown size. + // + // Experimental + // + // Notice: This API is EXPERIMENTAL and may be changed or removed in a + // later release. } var registeredCompressor = make(map[string]Compressor) diff --git a/vendor/google.golang.org/grpc/encoding/proto/proto.go b/vendor/google.golang.org/grpc/encoding/proto/proto.go index 66b97a6f692..3009b35afe7 100644 --- a/vendor/google.golang.org/grpc/encoding/proto/proto.go +++ b/vendor/google.golang.org/grpc/encoding/proto/proto.go @@ -21,8 +21,7 @@ package proto import ( - "math" - "sync" + "fmt" "github.com/golang/protobuf/proto" "google.golang.org/grpc/encoding" @@ -38,73 +37,22 @@ func init() { // codec is a Codec implementation with protobuf. It is the default codec for gRPC. type codec struct{} -type cachedProtoBuffer struct { - lastMarshaledSize uint32 - proto.Buffer -} - -func capToMaxInt32(val int) uint32 { - if val > math.MaxInt32 { - return uint32(math.MaxInt32) - } - return uint32(val) -} - -func marshal(v interface{}, cb *cachedProtoBuffer) ([]byte, error) { - protoMsg := v.(proto.Message) - newSlice := make([]byte, 0, cb.lastMarshaledSize) - - cb.SetBuf(newSlice) - cb.Reset() - if err := cb.Marshal(protoMsg); err != nil { - return nil, err - } - out := cb.Bytes() - cb.lastMarshaledSize = capToMaxInt32(len(out)) - return out, nil -} - func (codec) Marshal(v interface{}) ([]byte, error) { - if pm, ok := v.(proto.Marshaler); ok { - // object can marshal itself, no need for buffer - return pm.Marshal() + vv, ok := v.(proto.Message) + if !ok { + return nil, fmt.Errorf("failed to marshal, message is %T, want proto.Message", v) } - - cb := protoBufferPool.Get().(*cachedProtoBuffer) - out, err := marshal(v, cb) - - // put back buffer and lose the ref to the slice - cb.SetBuf(nil) - protoBufferPool.Put(cb) - return out, err + return proto.Marshal(vv) } func (codec) Unmarshal(data []byte, v interface{}) error { - protoMsg := v.(proto.Message) - protoMsg.Reset() - - if pu, ok := protoMsg.(proto.Unmarshaler); ok { - // object can unmarshal itself, no need for buffer - return pu.Unmarshal(data) + vv, ok := v.(proto.Message) + if !ok { + return fmt.Errorf("failed to unmarshal, message is %T, want proto.Message", v) } - - cb := protoBufferPool.Get().(*cachedProtoBuffer) - cb.SetBuf(data) - err := cb.Unmarshal(protoMsg) - cb.SetBuf(nil) - protoBufferPool.Put(cb) - return err + return proto.Unmarshal(data, vv) } func (codec) Name() string { return Name } - -var protoBufferPool = &sync.Pool{ - New: func() interface{} { - return &cachedProtoBuffer{ - Buffer: 
proto.Buffer{}, - lastMarshaledSize: 16, - } - }, -} diff --git a/vendor/google.golang.org/grpc/go.mod b/vendor/google.golang.org/grpc/go.mod index 31f2b01f64e..cab74e55774 100644 --- a/vendor/google.golang.org/grpc/go.mod +++ b/vendor/google.golang.org/grpc/go.mod @@ -3,13 +3,15 @@ module google.golang.org/grpc go 1.11 require ( - github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f - github.com/envoyproxy/go-control-plane v0.9.4 + github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403 + github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b - github.com/golang/protobuf v1.3.3 - github.com/google/go-cmp v0.4.0 + github.com/golang/protobuf v1.4.2 + github.com/google/go-cmp v0.5.0 + github.com/google/uuid v1.1.2 golang.org/x/net v0.0.0-20190311183353-d8887717615a golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a - google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 + google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 + google.golang.org/protobuf v1.25.0 ) diff --git a/vendor/google.golang.org/grpc/go.sum b/vendor/google.golang.org/grpc/go.sum index be8078eace2..77ee70b4435 100644 --- a/vendor/google.golang.org/grpc/go.sum +++ b/vendor/google.golang.org/grpc/go.sum @@ -2,35 +2,52 @@ cloud.google.com/go v0.26.0 h1:e0WKqKTd5BnrG8aKH3J3h+QvEIQtSUcf2n5UZ5ZgLtQ= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f h1:WBZRG4aNOuI15bLRrCgN8fCq8E5Xuty6jGbmSNEvSsU= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403 h1:cqQfy1jclcSy/FwLjemeg3SR1yaINm74aQyupQ0Bl8M= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4 h1:rEvIZUSZ3fx39WIi3JkQqQBitGwpELBIYWeBVh6wn+E= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad h1:EmNYJhPYy0pOFjCx2PrgtaBXmee0iUX9hLlxE1xHOJE= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0 h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod 
h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/mock v1.1.1 h1:G5FRp8JnTd7RQH5kemVNlMeyXQAztQ3mOWV95KxsXH8= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3 h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0 h1:/QaMHBdZ26BB3SSst0Iwl10Epc+xhTquomWX0oZEB6w= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint 
v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3 h1:XQyxROzUlZH+WIQwySDgnISgOivlhjIEwaQaJEJrrN0= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -50,7 +67,6 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135 h1:5Beo0mZN8dRzgrMMkDp0jc8YXQKx9DiJ2k1dkvGsn5A= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -58,11 +74,26 @@ google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9Ywl google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 h1:gSJIx1SDwno+2ElGhA4+qG2zF97qiUzTM+rQ0klBOcE= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 h1:+kGHl1aib/qcwaRi1CbqBZ1rk19r85MNUf8HaBghugY= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= +google.golang.org/protobuf v1.25.0/go.mod 
h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/vendor/google.golang.org/grpc/grpclog/component.go b/vendor/google.golang.org/grpc/grpclog/component.go index b513281a34c..8358dd6e2ab 100644 --- a/vendor/google.golang.org/grpc/grpclog/component.go +++ b/vendor/google.golang.org/grpc/grpclog/component.go @@ -100,7 +100,7 @@ func (c *componentData) Fatalln(args ...interface{}) { } func (c *componentData) V(l int) bool { - return grpclog.Logger.V(l) + return V(l) } // Component creates a new component and returns it for logging. If a component diff --git a/vendor/google.golang.org/grpc/grpclog/loggerv2.go b/vendor/google.golang.org/grpc/grpclog/loggerv2.go index 8eba2d0e0ef..4ee33171e00 100644 --- a/vendor/google.golang.org/grpc/grpclog/loggerv2.go +++ b/vendor/google.golang.org/grpc/grpclog/loggerv2.go @@ -204,7 +204,10 @@ func (g *loggerT) V(l int) bool { // DepthLoggerV2, the below functions will be called with the appropriate stack // depth set for trivial functions the logger may ignore. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type DepthLoggerV2 interface { LoggerV2 // InfoDepth logs to INFO log at the specified depth. Arguments are handled in the manner of fmt.Print. diff --git a/vendor/google.golang.org/grpc/install_gae.sh b/vendor/google.golang.org/grpc/install_gae.sh index 7c7bcada504..15ff9facdd7 100644 --- a/vendor/google.golang.org/grpc/install_gae.sh +++ b/vendor/google.golang.org/grpc/install_gae.sh @@ -3,4 +3,4 @@ TMP=$(mktemp -d /tmp/sdk.XXX) \ && curl -o $TMP.zip "https://storage.googleapis.com/appengine-sdks/featured/go_appengine_sdk_linux_amd64-1.9.68.zip" \ && unzip -q $TMP.zip -d $TMP \ -&& export PATH="$PATH:$TMP/go_appengine" +&& export PATH="$PATH:$TMP/go_appengine" \ No newline at end of file diff --git a/vendor/google.golang.org/grpc/interceptor.go b/vendor/google.golang.org/grpc/interceptor.go index 8b7350022ad..668e0adcf0a 100644 --- a/vendor/google.golang.org/grpc/interceptor.go +++ b/vendor/google.golang.org/grpc/interceptor.go @@ -25,17 +25,41 @@ import ( // UnaryInvoker is called by UnaryClientInterceptor to complete RPCs. type UnaryInvoker func(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, opts ...CallOption) error -// UnaryClientInterceptor intercepts the execution of a unary RPC on the client. invoker is the handler to complete the RPC -// and it is the responsibility of the interceptor to call it. -// This is an EXPERIMENTAL API. +// UnaryClientInterceptor intercepts the execution of a unary RPC on the client. +// Unary interceptors can be specified as a DialOption, using +// WithUnaryInterceptor() or WithChainUnaryInterceptor(), when creating a +// ClientConn. 
When a unary interceptor(s) is set on a ClientConn, gRPC +// delegates all unary RPC invocations to the interceptor, and it is the +// responsibility of the interceptor to call invoker to complete the processing +// of the RPC. +// +// method is the RPC name. req and reply are the corresponding request and +// response messages. cc is the ClientConn on which the RPC was invoked. invoker +// is the handler to complete the RPC and it is the responsibility of the +// interceptor to call it. opts contain all applicable call options, including +// defaults from the ClientConn as well as per-call options. +// +// The returned error must be compatible with the status package. type UnaryClientInterceptor func(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, invoker UnaryInvoker, opts ...CallOption) error // Streamer is called by StreamClientInterceptor to create a ClientStream. type Streamer func(ctx context.Context, desc *StreamDesc, cc *ClientConn, method string, opts ...CallOption) (ClientStream, error) -// StreamClientInterceptor intercepts the creation of ClientStream. It may return a custom ClientStream to intercept all I/O -// operations. streamer is the handler to create a ClientStream and it is the responsibility of the interceptor to call it. -// This is an EXPERIMENTAL API. +// StreamClientInterceptor intercepts the creation of a ClientStream. Stream +// interceptors can be specified as a DialOption, using WithStreamInterceptor() +// or WithChainStreamInterceptor(), when creating a ClientConn. When a stream +// interceptor(s) is set on the ClientConn, gRPC delegates all stream creations +// to the interceptor, and it is the responsibility of the interceptor to call +// streamer. +// +// desc contains a description of the stream. cc is the ClientConn on which the +// RPC was invoked. streamer is the handler to create a ClientStream and it is +// the responsibility of the interceptor to call it. opts contain all applicable +// call options, including defaults from the ClientConn as well as per-call +// options. +// +// StreamClientInterceptor may return a custom ClientStream to intercept all I/O +// operations. The returned error must be compatible with the status package. type StreamClientInterceptor func(ctx context.Context, desc *StreamDesc, cc *ClientConn, method string, streamer Streamer, opts ...CallOption) (ClientStream, error) // UnaryServerInfo consists of various information about a unary RPC on diff --git a/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go b/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go index 5e1083539b4..0cdb4183150 100644 --- a/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go +++ b/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go @@ -65,7 +65,7 @@ func newMethodLogger(h, m uint64) *MethodLogger { callID: idGen.next(), idWithinCallGen: &callIDGenerator{}, - sink: defaultSink, // TODO(blog): make it plugable. + sink: DefaultSink, // TODO(blog): make it plugable. 
} } diff --git a/vendor/google.golang.org/grpc/internal/binarylog/sink.go b/vendor/google.golang.org/grpc/internal/binarylog/sink.go index 835f51040cb..7d7a3056b71 100644 --- a/vendor/google.golang.org/grpc/internal/binarylog/sink.go +++ b/vendor/google.golang.org/grpc/internal/binarylog/sink.go @@ -21,9 +21,7 @@ package binarylog import ( "bufio" "encoding/binary" - "fmt" "io" - "io/ioutil" "sync" "time" @@ -32,20 +30,14 @@ import ( ) var ( - defaultSink Sink = &noopSink{} // TODO(blog): change this default (file in /tmp). + // DefaultSink is the sink where the logs will be written to. It's exported + // for the binarylog package to update. + DefaultSink Sink = &noopSink{} // TODO(blog): change this default (file in /tmp). ) -// SetDefaultSink sets the sink where binary logs will be written to. -// -// Not thread safe. Only set during initialization. -func SetDefaultSink(s Sink) { - if defaultSink != nil { - defaultSink.Close() - } - defaultSink = s -} - // Sink writes log entry into the binary log sink. +// +// sink is a copy of the exported binarylog.Sink, to avoid circular dependency. type Sink interface { // Write will be called to write the log entry into the sink. // @@ -66,7 +58,7 @@ func (ns *noopSink) Close() error { return nil } // message is prefixed with a 4 byte big endian unsigned integer as the length. // // No buffer is done, Close() doesn't try to close the writer. -func newWriterSink(w io.Writer) *writerSink { +func newWriterSink(w io.Writer) Sink { return &writerSink{out: w} } @@ -92,17 +84,17 @@ func (ws *writerSink) Write(e *pb.GrpcLogEntry) error { func (ws *writerSink) Close() error { return nil } -type bufWriteCloserSink struct { +type bufferedSink struct { mu sync.Mutex closer io.Closer - out *writerSink // out is built on buf. + out Sink // out is built on buf. buf *bufio.Writer // buf is kept for flush. writeStartOnce sync.Once writeTicker *time.Ticker } -func (fs *bufWriteCloserSink) Write(e *pb.GrpcLogEntry) error { +func (fs *bufferedSink) Write(e *pb.GrpcLogEntry) error { // Start the write loop when Write is called. fs.writeStartOnce.Do(fs.startFlushGoroutine) fs.mu.Lock() @@ -118,44 +110,50 @@ const ( bufFlushDuration = 60 * time.Second ) -func (fs *bufWriteCloserSink) startFlushGoroutine() { +func (fs *bufferedSink) startFlushGoroutine() { fs.writeTicker = time.NewTicker(bufFlushDuration) go func() { for range fs.writeTicker.C { fs.mu.Lock() - fs.buf.Flush() + if err := fs.buf.Flush(); err != nil { + grpclogLogger.Warningf("failed to flush to Sink: %v", err) + } fs.mu.Unlock() } }() } -func (fs *bufWriteCloserSink) Close() error { +func (fs *bufferedSink) Close() error { if fs.writeTicker != nil { fs.writeTicker.Stop() } fs.mu.Lock() - fs.buf.Flush() - fs.closer.Close() - fs.out.Close() + if err := fs.buf.Flush(); err != nil { + grpclogLogger.Warningf("failed to flush to Sink: %v", err) + } + if err := fs.closer.Close(); err != nil { + grpclogLogger.Warningf("failed to close the underlying WriterCloser: %v", err) + } + if err := fs.out.Close(); err != nil { + grpclogLogger.Warningf("failed to close the Sink: %v", err) + } fs.mu.Unlock() return nil } -func newBufWriteCloserSink(o io.WriteCloser) Sink { +// NewBufferedSink creates a binary log sink with the given WriteCloser. +// +// Write() marshals the proto message and writes it to the given writer. Each +// message is prefixed with a 4 byte big endian unsigned integer as the length. +// +// Content is kept in a buffer, and is flushed every 60 seconds. +// +// Close closes the WriteCloser. 
+func NewBufferedSink(o io.WriteCloser) Sink { bufW := bufio.NewWriter(o) - return &bufWriteCloserSink{ + return &bufferedSink{ closer: o, out: newWriterSink(bufW), buf: bufW, } } - -// NewTempFileSink creates a temp file and returns a Sink that writes to this -// file. -func NewTempFileSink() (Sink, error) { - tempFile, err := ioutil.TempFile("/tmp", "grpcgo_binarylog_*.txt") - if err != nil { - return nil, fmt.Errorf("failed to create temp file: %v", err) - } - return newBufWriteCloserSink(tempFile), nil -} diff --git a/vendor/google.golang.org/grpc/internal/channelz/funcs.go b/vendor/google.golang.org/grpc/internal/channelz/funcs.go index 81d3dd33e62..f7314139303 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/funcs.go +++ b/vendor/google.golang.org/grpc/internal/channelz/funcs.go @@ -297,9 +297,7 @@ type TraceEventDesc struct { func AddTraceEvent(l grpclog.DepthLoggerV2, id int64, depth int, desc *TraceEventDesc) { for d := desc; d != nil; d = d.Parent { switch d.Severity { - case CtUNKNOWN: - l.InfoDepth(depth+1, d.Desc) - case CtINFO: + case CtUnknown, CtInfo: l.InfoDepth(depth+1, d.Desc) case CtWarning: l.WarningDepth(depth+1, d.Desc) diff --git a/vendor/google.golang.org/grpc/internal/channelz/logging.go b/vendor/google.golang.org/grpc/internal/channelz/logging.go index e94039ee20b..b0013f9c886 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/logging.go +++ b/vendor/google.golang.org/grpc/internal/channelz/logging.go @@ -31,7 +31,7 @@ func Info(l grpclog.DepthLoggerV2, id int64, args ...interface{}) { if IsOn() { AddTraceEvent(l, id, 1, &TraceEventDesc{ Desc: fmt.Sprint(args...), - Severity: CtINFO, + Severity: CtInfo, }) } else { l.InfoDepth(1, args...) @@ -44,7 +44,7 @@ func Infof(l grpclog.DepthLoggerV2, id int64, format string, args ...interface{} if IsOn() { AddTraceEvent(l, id, 1, &TraceEventDesc{ Desc: msg, - Severity: CtINFO, + Severity: CtInfo, }) } else { l.InfoDepth(1, msg) diff --git a/vendor/google.golang.org/grpc/internal/channelz/types.go b/vendor/google.golang.org/grpc/internal/channelz/types.go index 075dc7d1671..3c595d154bd 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/types.go +++ b/vendor/google.golang.org/grpc/internal/channelz/types.go @@ -672,10 +672,10 @@ func (c *channelTrace) clear() { type Severity int const ( - // CtUNKNOWN indicates unknown severity of a trace event. - CtUNKNOWN Severity = iota - // CtINFO indicates info level severity of a trace event. - CtINFO + // CtUnknown indicates unknown severity of a trace event. + CtUnknown Severity = iota + // CtInfo indicates info level severity of a trace event. + CtInfo // CtWarning indicates warning level severity of a trace event. CtWarning // CtError indicates error level severity of a trace event. diff --git a/vendor/google.golang.org/grpc/internal/credentials/go110.go b/vendor/google.golang.org/grpc/internal/credentials/spiffe.go similarity index 66% rename from vendor/google.golang.org/grpc/internal/credentials/go110.go rename to vendor/google.golang.org/grpc/internal/credentials/spiffe.go index d55b5203626..be70b6cdfc3 100644 --- a/vendor/google.golang.org/grpc/internal/credentials/go110.go +++ b/vendor/google.golang.org/grpc/internal/credentials/spiffe.go @@ -1,4 +1,4 @@ -// +build go1.10 +// +build !appengine /* * @@ -25,38 +25,50 @@ package credentials import ( "crypto/tls" + "crypto/x509" "net/url" "google.golang.org/grpc/grpclog" ) +var logger = grpclog.Component("credentials") + // SPIFFEIDFromState parses the SPIFFE ID from State. 
If the SPIFFE ID format // is invalid, return nil with warning. func SPIFFEIDFromState(state tls.ConnectionState) *url.URL { if len(state.PeerCertificates) == 0 || len(state.PeerCertificates[0].URIs) == 0 { return nil } + return SPIFFEIDFromCert(state.PeerCertificates[0]) +} + +// SPIFFEIDFromCert parses the SPIFFE ID from x509.Certificate. If the SPIFFE +// ID format is invalid, return nil with warning. +func SPIFFEIDFromCert(cert *x509.Certificate) *url.URL { + if cert == nil || cert.URIs == nil { + return nil + } var spiffeID *url.URL - for _, uri := range state.PeerCertificates[0].URIs { + for _, uri := range cert.URIs { if uri == nil || uri.Scheme != "spiffe" || uri.Opaque != "" || (uri.User != nil && uri.User.Username() != "") { continue } // From this point, we assume the uri is intended for a SPIFFE ID. if len(uri.String()) > 2048 { - grpclog.Warning("invalid SPIFFE ID: total ID length larger than 2048 bytes") + logger.Warning("invalid SPIFFE ID: total ID length larger than 2048 bytes") return nil } - if len(uri.Host) == 0 || len(uri.RawPath) == 0 || len(uri.Path) == 0 { - grpclog.Warning("invalid SPIFFE ID: domain or workload ID is empty") + if len(uri.Host) == 0 || len(uri.Path) == 0 { + logger.Warning("invalid SPIFFE ID: domain or workload ID is empty") return nil } if len(uri.Host) > 255 { - grpclog.Warning("invalid SPIFFE ID: domain length larger than 255 characters") + logger.Warning("invalid SPIFFE ID: domain length larger than 255 characters") return nil } // A valid SPIFFE certificate can only have exactly one URI SAN field. - if len(state.PeerCertificates[0].URIs) > 1 { - grpclog.Warning("invalid SPIFFE ID: multiple URI SANs") + if len(cert.URIs) > 1 { + logger.Warning("invalid SPIFFE ID: multiple URI SANs") return nil } spiffeID = uri diff --git a/vendor/google.golang.org/grpc/internal/credentials/gobefore110.go b/vendor/google.golang.org/grpc/internal/credentials/spiffe_appengine.go similarity index 90% rename from vendor/google.golang.org/grpc/internal/credentials/gobefore110.go rename to vendor/google.golang.org/grpc/internal/credentials/spiffe_appengine.go index 743713e19f8..af6f5771976 100644 --- a/vendor/google.golang.org/grpc/internal/credentials/gobefore110.go +++ b/vendor/google.golang.org/grpc/internal/credentials/spiffe_appengine.go @@ -1,4 +1,4 @@ -// +build !go1.10 +// +build appengine /* * @@ -25,7 +25,7 @@ import ( "net/url" ) -//TODO(ZhenLian): delete this file when we remove Go 1.9 tests. +// SPIFFEIDFromState is a no-op for appengine builds. func SPIFFEIDFromState(state tls.ConnectionState) *url.URL { return nil } diff --git a/vendor/google.golang.org/grpc/credentials/internal/syscallconn.go b/vendor/google.golang.org/grpc/internal/credentials/syscallconn.go similarity index 96% rename from vendor/google.golang.org/grpc/credentials/internal/syscallconn.go rename to vendor/google.golang.org/grpc/internal/credentials/syscallconn.go index 2f4472becc7..f499a614c20 100644 --- a/vendor/google.golang.org/grpc/credentials/internal/syscallconn.go +++ b/vendor/google.golang.org/grpc/internal/credentials/syscallconn.go @@ -18,8 +18,7 @@ * */ -// Package internal contains credentials-internal code. 
-package internal +package credentials import ( "net" diff --git a/vendor/google.golang.org/grpc/credentials/internal/syscallconn_appengine.go b/vendor/google.golang.org/grpc/internal/credentials/syscallconn_appengine.go similarity index 97% rename from vendor/google.golang.org/grpc/credentials/internal/syscallconn_appengine.go rename to vendor/google.golang.org/grpc/internal/credentials/syscallconn_appengine.go index d4346e9eabe..a6144cd661c 100644 --- a/vendor/google.golang.org/grpc/credentials/internal/syscallconn_appengine.go +++ b/vendor/google.golang.org/grpc/internal/credentials/syscallconn_appengine.go @@ -18,7 +18,7 @@ * */ -package internal +package credentials import ( "net" diff --git a/vendor/google.golang.org/grpc/internal/credentials/util.go b/vendor/google.golang.org/grpc/internal/credentials/util.go new file mode 100644 index 00000000000..55664fa46b8 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/credentials/util.go @@ -0,0 +1,50 @@ +/* + * + * Copyright 2020 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package credentials + +import "crypto/tls" + +const alpnProtoStrH2 = "h2" + +// AppendH2ToNextProtos appends h2 to next protos. +func AppendH2ToNextProtos(ps []string) []string { + for _, p := range ps { + if p == alpnProtoStrH2 { + return ps + } + } + ret := make([]string, 0, len(ps)+1) + ret = append(ret, ps...) + return append(ret, alpnProtoStrH2) +} + +// CloneTLSConfig returns a shallow clone of the exported +// fields of cfg, ignoring the unexported sync.Once, which +// contains a mutex and must not be copied. +// +// If cfg is nil, a new zero tls.Config is returned. +// +// TODO: inline this function if possible. +func CloneTLSConfig(cfg *tls.Config) *tls.Config { + if cfg == nil { + return &tls.Config{} + } + + return cfg.Clone() +} diff --git a/vendor/google.golang.org/grpc/internal/grpclog/grpclog.go b/vendor/google.golang.org/grpc/internal/grpclog/grpclog.go index 745a166f02c..e6f975cbf6a 100644 --- a/vendor/google.golang.org/grpc/internal/grpclog/grpclog.go +++ b/vendor/google.golang.org/grpc/internal/grpclog/grpclog.go @@ -110,7 +110,10 @@ type LoggerV2 interface { // This is a copy of the DepthLoggerV2 defined in the external grpclog package. // It is defined here to avoid a circular dependency. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type DepthLoggerV2 interface { // InfoDepth logs to INFO log at the specified depth. Arguments are handled in the manner of fmt.Print. 
InfoDepth(depth int, args ...interface{}) diff --git a/vendor/google.golang.org/grpc/internal/grpcutil/target.go b/vendor/google.golang.org/grpc/internal/grpcutil/target.go index 80b33cdaf90..8833021da02 100644 --- a/vendor/google.golang.org/grpc/internal/grpcutil/target.go +++ b/vendor/google.golang.org/grpc/internal/grpcutil/target.go @@ -37,19 +37,53 @@ func split2(s, sep string) (string, string, bool) { } // ParseTarget splits target into a resolver.Target struct containing scheme, -// authority and endpoint. +// authority and endpoint. skipUnixColonParsing indicates that the parse should +// not parse "unix:[path]" cases. This should be true in cases where a custom +// dialer is present, to prevent a behavior change. // -// If target is not a valid scheme://authority/endpoint, it returns {Endpoint: -// target}. -func ParseTarget(target string) (ret resolver.Target) { +// If target is not a valid scheme://authority/endpoint as specified in +// https://github.com/grpc/grpc/blob/master/doc/naming.md, +// it returns {Endpoint: target}. +func ParseTarget(target string, skipUnixColonParsing bool) (ret resolver.Target) { var ok bool + if strings.HasPrefix(target, "unix-abstract:") { + if strings.HasPrefix(target, "unix-abstract://") { + // Maybe, with Authority specified, try to parse it + var remain string + ret.Scheme, remain, _ = split2(target, "://") + ret.Authority, ret.Endpoint, ok = split2(remain, "/") + if !ok { + // No Authority, add the "//" back + ret.Endpoint = "//" + remain + } else { + // Found Authority, add the "/" back + ret.Endpoint = "/" + ret.Endpoint + } + } else { + // Without Authority specified, split target on ":" + ret.Scheme, ret.Endpoint, _ = split2(target, ":") + } + return ret + } ret.Scheme, ret.Endpoint, ok = split2(target, "://") if !ok { + if strings.HasPrefix(target, "unix:") && !skipUnixColonParsing { + // Handle the "unix:[local/path]" and "unix:[/absolute/path]" cases, + // because splitting on :// only handles the + // "unix://[/absolute/path]" case. Only handle if the dialer is nil, + // to avoid a behavior change with custom dialers. + return resolver.Target{Scheme: "unix", Endpoint: target[len("unix:"):]} + } return resolver.Target{Endpoint: target} } ret.Authority, ret.Endpoint, ok = split2(ret.Endpoint, "/") if !ok { return resolver.Target{Endpoint: target} } + if ret.Scheme == "unix" { + // Add the "/" back in the unix case, so the unix resolver receives the + // actual endpoint in the "unix://[/absolute/path]" case. + ret.Endpoint = "/" + ret.Endpoint + } return ret } diff --git a/vendor/google.golang.org/grpc/internal/internal.go b/vendor/google.golang.org/grpc/internal/internal.go index 818ca857998..1e2834c70f6 100644 --- a/vendor/google.golang.org/grpc/internal/internal.go +++ b/vendor/google.golang.org/grpc/internal/internal.go @@ -52,6 +52,19 @@ var ( // This function compares the config without rawJSON stripped, in case the // there's difference in white space. EqualServiceConfigForTesting func(a, b serviceconfig.Config) bool + // GetCertificateProviderBuilder returns the registered builder for the + // given name. This is set by package certprovider for use from xDS + // bootstrap code while parsing certificate provider configs in the + // bootstrap file. + GetCertificateProviderBuilder interface{} // func(string) certprovider.Builder + // GetXDSHandshakeInfoForTesting returns a pointer to the xds.HandshakeInfo + // stored in the passed in attributes. This is set by + // credentials/xds/xds.go. 
+ GetXDSHandshakeInfoForTesting interface{} // func (*attributes.Attributes) *xds.HandshakeInfo + // GetServerCredentials returns the transport credentials configured on a + // gRPC server. An xDS-enabled server needs to know what type of credentials + // is configured on the underlying gRPC server. This is set by server.go. + GetServerCredentials interface{} // func (*grpc.Server) credentials.TransportCredentials ) // HealthChecker defines the signature of the client-side LB channel health checking function. diff --git a/vendor/google.golang.org/grpc/internal/metadata/metadata.go b/vendor/google.golang.org/grpc/internal/metadata/metadata.go new file mode 100644 index 00000000000..302262613a0 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/metadata/metadata.go @@ -0,0 +1,50 @@ +/* + * + * Copyright 2020 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package metadata contains functions to set and get metadata from addresses. +// +// This package is experimental. +package metadata + +import ( + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/resolver" +) + +type mdKeyType string + +const mdKey = mdKeyType("grpc.internal.address.metadata") + +// Get returns the metadata of addr. +func Get(addr resolver.Address) metadata.MD { + attrs := addr.Attributes + if attrs == nil { + return nil + } + md, _ := attrs.Value(mdKey).(metadata.MD) + return md +} + +// Set sets (overrides) the metadata in addr. +// +// When a SubConn is created with this address, the RPCs sent on it will all +// have this metadata. +func Set(addr resolver.Address, md metadata.MD) resolver.Address { + addr.Attributes = addr.Attributes.WithValues(mdKey, md) + return addr +} diff --git a/vendor/google.golang.org/grpc/internal/resolver/config_selector.go b/vendor/google.golang.org/grpc/internal/resolver/config_selector.go new file mode 100644 index 00000000000..e6990040056 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/resolver/config_selector.go @@ -0,0 +1,95 @@ +/* + * + * Copyright 2020 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package resolver provides internal resolver-related functionality. +package resolver + +import ( + "context" + "sync" + + "google.golang.org/grpc/internal/serviceconfig" + "google.golang.org/grpc/resolver" +) + +// ConfigSelector controls what configuration to use for every RPC. +type ConfigSelector interface { + // Selects the configuration for the RPC, or terminates it using the error. 
+ // This error will be converted by the gRPC library to a status error with + // code UNKNOWN if it is not returned as a status error. + SelectConfig(RPCInfo) (*RPCConfig, error) +} + +// RPCInfo contains RPC information needed by a ConfigSelector. +type RPCInfo struct { + // Context is the user's context for the RPC and contains headers and + // application timeout. It is passed for interception purposes and for + // efficiency reasons. SelectConfig should not be blocking. + Context context.Context + Method string // i.e. "/Service/Method" +} + +// RPCConfig describes the configuration to use for each RPC. +type RPCConfig struct { + // The context to use for the remainder of the RPC; can pass info to LB + // policy or affect timeout or metadata. + Context context.Context + MethodConfig serviceconfig.MethodConfig // configuration to use for this RPC + OnCommitted func() // Called when the RPC has been committed (retries no longer possible) +} + +type csKeyType string + +const csKey = csKeyType("grpc.internal.resolver.configSelector") + +// SetConfigSelector sets the config selector in state and returns the new +// state. +func SetConfigSelector(state resolver.State, cs ConfigSelector) resolver.State { + state.Attributes = state.Attributes.WithValues(csKey, cs) + return state +} + +// GetConfigSelector retrieves the config selector from state, if present, and +// returns it or nil if absent. +func GetConfigSelector(state resolver.State) ConfigSelector { + cs, _ := state.Attributes.Value(csKey).(ConfigSelector) + return cs +} + +// SafeConfigSelector allows for safe switching of ConfigSelector +// implementations such that previous values are guaranteed to not be in use +// when UpdateConfigSelector returns. +type SafeConfigSelector struct { + mu sync.RWMutex + cs ConfigSelector +} + +// UpdateConfigSelector swaps to the provided ConfigSelector and blocks until +// all uses of the previous ConfigSelector have completed. +func (scs *SafeConfigSelector) UpdateConfigSelector(cs ConfigSelector) { + scs.mu.Lock() + defer scs.mu.Unlock() + scs.cs = cs +} + +// SelectConfig defers to the current ConfigSelector in scs. +func (scs *SafeConfigSelector) SelectConfig(r RPCInfo) (*RPCConfig, error) { + scs.mu.RLock() + defer scs.mu.RUnlock() + return scs.cs.SelectConfig(r) +} diff --git a/vendor/google.golang.org/grpc/internal/resolver/unix/unix.go b/vendor/google.golang.org/grpc/internal/resolver/unix/unix.go new file mode 100644 index 00000000000..0d5a811ddfa --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/resolver/unix/unix.go @@ -0,0 +1,63 @@ +/* + * + * Copyright 2020 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package unix implements a resolver for unix targets. 
+package unix + +import ( + "fmt" + + "google.golang.org/grpc/internal/transport/networktype" + "google.golang.org/grpc/resolver" +) + +const unixScheme = "unix" +const unixAbstractScheme = "unix-abstract" + +type builder struct { + scheme string +} + +func (b *builder) Build(target resolver.Target, cc resolver.ClientConn, _ resolver.BuildOptions) (resolver.Resolver, error) { + if target.Authority != "" { + return nil, fmt.Errorf("invalid (non-empty) authority: %v", target.Authority) + } + addr := resolver.Address{Addr: target.Endpoint} + if b.scheme == unixAbstractScheme { + // prepend "\x00" to address for unix-abstract + addr.Addr = "\x00" + addr.Addr + } + cc.UpdateState(resolver.State{Addresses: []resolver.Address{networktype.Set(addr, "unix")}}) + return &nopResolver{}, nil +} + +func (b *builder) Scheme() string { + return b.scheme +} + +type nopResolver struct { +} + +func (*nopResolver) ResolveNow(resolver.ResolveNowOptions) {} + +func (*nopResolver) Close() {} + +func init() { + resolver.Register(&builder{scheme: unixScheme}) + resolver.Register(&builder{scheme: unixAbstractScheme}) +} diff --git a/vendor/google.golang.org/grpc/internal/serviceconfig/serviceconfig.go b/vendor/google.golang.org/grpc/internal/serviceconfig/serviceconfig.go index 9b26414d40e..bd4b8875f1a 100644 --- a/vendor/google.golang.org/grpc/internal/serviceconfig/serviceconfig.go +++ b/vendor/google.golang.org/grpc/internal/serviceconfig/serviceconfig.go @@ -22,14 +22,21 @@ package serviceconfig import ( "encoding/json" "fmt" + "time" "google.golang.org/grpc/balancer" + "google.golang.org/grpc/codes" "google.golang.org/grpc/grpclog" externalserviceconfig "google.golang.org/grpc/serviceconfig" ) -// BalancerConfig is the balancer config part that service config's -// loadBalancingConfig fields can be unmarshalled to. It's a json unmarshaller. +var logger = grpclog.Component("core") + +// BalancerConfig wraps the name and config associated with one load balancing +// policy. It corresponds to a single entry of the loadBalancingConfig field +// from ServiceConfig. +// +// It implements the json.Unmarshaler interface. // // https://github.com/grpc/grpc-proto/blob/54713b1e8bc6ed2d4f25fb4dff527842150b91b2/grpc/service_config/service_config.proto#L247 type BalancerConfig struct { @@ -39,7 +46,15 @@ type BalancerConfig struct { type intermediateBalancerConfig []map[string]json.RawMessage -// UnmarshalJSON implements json unmarshaller. +// UnmarshalJSON implements the json.Unmarshaler interface. +// +// ServiceConfig contains a list of loadBalancingConfigs, each with a name and +// config. This method iterates through that list in order, and stops at the +// first policy that is supported. +// - If the config for the first supported policy is invalid, the whole service +// config is invalid. +// - If the list doesn't contain any supported policy, the whole service config +// is invalid. func (bc *BalancerConfig) UnmarshalJSON(b []byte) error { var ir intermediateBalancerConfig err := json.Unmarshal(b, &ir) @@ -51,13 +66,16 @@ func (bc *BalancerConfig) UnmarshalJSON(b []byte) error { if len(lbcfg) != 1 { return fmt.Errorf("invalid loadBalancingConfig: entry %v does not contain exactly 1 policy/config pair: %q", i, lbcfg) } + var ( name string jsonCfg json.RawMessage ) - // Get the key:value pair from the map. + // Get the key:value pair from the map. We have already made sure that + // the map contains a single entry. 
for name, jsonCfg = range lbcfg { } + builder := balancer.Get(name) if builder == nil { // If the balancer is not registered, move on to the next config. @@ -69,7 +87,7 @@ func (bc *BalancerConfig) UnmarshalJSON(b []byte) error { parser, ok := builder.(balancer.ConfigParser) if !ok { if string(jsonCfg) != "{}" { - grpclog.Warningf("non-empty balancer configuration %q, but balancer does not implement ParseConfig", string(jsonCfg)) + logger.Warningf("non-empty balancer configuration %q, but balancer does not implement ParseConfig", string(jsonCfg)) } // Stop at this, though the builder doesn't support parsing config. return nil @@ -88,3 +106,57 @@ func (bc *BalancerConfig) UnmarshalJSON(b []byte) error { // case. return fmt.Errorf("invalid loadBalancingConfig: no supported policies found") } + +// MethodConfig defines the configuration recommended by the service providers for a +// particular method. +type MethodConfig struct { + // WaitForReady indicates whether RPCs sent to this method should wait until + // the connection is ready by default (!failfast). The value specified via the + // gRPC client API will override the value set here. + WaitForReady *bool + // Timeout is the default timeout for RPCs sent to this method. The actual + // deadline used will be the minimum of the value specified here and the value + // set by the application via the gRPC client API. If either one is not set, + // then the other will be used. If neither is set, then the RPC has no deadline. + Timeout *time.Duration + // MaxReqSize is the maximum allowed payload size for an individual request in a + // stream (client->server) in bytes. The size which is measured is the serialized + // payload after per-message compression (but before stream compression) in bytes. + // The actual value used is the minimum of the value specified here and the value set + // by the application via the gRPC client API. If either one is not set, then the other + // will be used. If neither is set, then the built-in default is used. + MaxReqSize *int + // MaxRespSize is the maximum allowed payload size for an individual response in a + // stream (server->client) in bytes. + MaxRespSize *int + // RetryPolicy configures retry options for the method. + RetryPolicy *RetryPolicy +} + +// RetryPolicy defines the go-native version of the retry policy defined by the +// service config here: +// https://github.com/grpc/proposal/blob/master/A6-client-retries.md#integration-with-service-config +type RetryPolicy struct { + // MaxAttempts is the maximum number of attempts, including the original RPC. + // + // This field is required and must be two or greater. + MaxAttempts int + + // Exponential backoff parameters. The initial retry attempt will occur at + // random(0, initialBackoff). In general, the nth attempt will occur at + // random(0, + // min(initialBackoff*backoffMultiplier**(n-1), maxBackoff)). + // + // These fields are required and must be greater than zero. + InitialBackoff time.Duration + MaxBackoff time.Duration + BackoffMultiplier float64 + + // The set of status codes which may be retried. + // + // Status codes are specified as strings, e.g., "UNAVAILABLE". + // + // This field is required and must be non-empty. + // Note: a set is used to store this for easy lookup. 
+ RetryableStatusCodes map[codes.Code]bool +} diff --git a/vendor/google.golang.org/grpc/internal/syscall/syscall_linux.go b/vendor/google.golang.org/grpc/internal/syscall/syscall_linux.go index c50468a0fc8..4b2964f2a1e 100644 --- a/vendor/google.golang.org/grpc/internal/syscall/syscall_linux.go +++ b/vendor/google.golang.org/grpc/internal/syscall/syscall_linux.go @@ -43,26 +43,24 @@ func GetCPUTime() int64 { return ts.Nano() } -// Rusage is an alias for syscall.Rusage under linux non-appengine environment. -type Rusage syscall.Rusage +// Rusage is an alias for syscall.Rusage under linux environment. +type Rusage = syscall.Rusage // GetRusage returns the resource usage of current process. -func GetRusage() (rusage *Rusage) { - rusage = new(Rusage) - syscall.Getrusage(syscall.RUSAGE_SELF, (*syscall.Rusage)(rusage)) - return +func GetRusage() *Rusage { + rusage := new(Rusage) + syscall.Getrusage(syscall.RUSAGE_SELF, rusage) + return rusage } // CPUTimeDiff returns the differences of user CPU time and system CPU time used // between two Rusage structs. func CPUTimeDiff(first *Rusage, latest *Rusage) (float64, float64) { - f := (*syscall.Rusage)(first) - l := (*syscall.Rusage)(latest) var ( - utimeDiffs = l.Utime.Sec - f.Utime.Sec - utimeDiffus = l.Utime.Usec - f.Utime.Usec - stimeDiffs = l.Stime.Sec - f.Stime.Sec - stimeDiffus = l.Stime.Usec - f.Stime.Usec + utimeDiffs = latest.Utime.Sec - first.Utime.Sec + utimeDiffus = latest.Utime.Usec - first.Utime.Usec + stimeDiffs = latest.Stime.Sec - first.Stime.Sec + stimeDiffus = latest.Stime.Usec - first.Stime.Usec ) uTimeElapsed := float64(utimeDiffs) + float64(utimeDiffus)*1.0e-6 diff --git a/vendor/google.golang.org/grpc/internal/syscall/syscall_nonlinux.go b/vendor/google.golang.org/grpc/internal/syscall/syscall_nonlinux.go index adae60d6518..7913ef1dbfb 100644 --- a/vendor/google.golang.org/grpc/internal/syscall/syscall_nonlinux.go +++ b/vendor/google.golang.org/grpc/internal/syscall/syscall_nonlinux.go @@ -50,7 +50,7 @@ func GetCPUTime() int64 { type Rusage struct{} // GetRusage is a no-op function under non-linux or appengine environment. -func GetRusage() (rusage *Rusage) { +func GetRusage() *Rusage { log() return nil } diff --git a/vendor/google.golang.org/grpc/internal/transport/http2_client.go b/vendor/google.golang.org/grpc/internal/transport/http2_client.go index e7f2321131e..8902b7f90d9 100644 --- a/vendor/google.golang.org/grpc/internal/transport/http2_client.go +++ b/vendor/google.golang.org/grpc/internal/transport/http2_client.go @@ -33,6 +33,8 @@ import ( "golang.org/x/net/http2" "golang.org/x/net/http2/hpack" "google.golang.org/grpc/internal/grpcutil" + imetadata "google.golang.org/grpc/internal/metadata" + "google.golang.org/grpc/internal/transport/networktype" "google.golang.org/grpc/codes" "google.golang.org/grpc/credentials" @@ -59,7 +61,7 @@ type http2Client struct { cancel context.CancelFunc ctxDone <-chan struct{} // Cache the ctx.Done() chan. 
userAgent string - md interface{} + md metadata.MD conn net.Conn // underlying communication channel loopy *loopyWriter remoteAddr net.Addr @@ -137,11 +139,27 @@ type http2Client struct { connectionID uint64 } -func dial(ctx context.Context, fn func(context.Context, string) (net.Conn, error), addr string) (net.Conn, error) { +func dial(ctx context.Context, fn func(context.Context, string) (net.Conn, error), addr resolver.Address, useProxy bool, grpcUA string) (net.Conn, error) { + address := addr.Addr + networkType, ok := networktype.Get(addr) if fn != nil { - return fn(ctx, addr) + if networkType == "unix" && !strings.HasPrefix(address, "\x00") { + // For backward compatibility, if the user dialed "unix:///path", + // the passthrough resolver would be used and the user's custom + // dialer would see "unix:///path". Since the unix resolver is used + // and the address is now "/path", prepend "unix://" so the user's + // custom dialer sees the same address. + return fn(ctx, "unix://"+address) + } + return fn(ctx, address) + } + if !ok { + networkType, address = parseDialTarget(address) } - return (&net.Dialer{}).DialContext(ctx, "tcp", addr) + if networkType == "tcp" && useProxy { + return proxyDial(ctx, address, grpcUA) + } + return (&net.Dialer{}).DialContext(ctx, networkType, address) } func isTemporary(err error) bool { @@ -172,7 +190,7 @@ func newHTTP2Client(connectCtx, ctx context.Context, addr resolver.Address, opts } }() - conn, err := dial(connectCtx, opts.Dialer, addr.Addr) + conn, err := dial(connectCtx, opts.Dialer, addr, opts.UseProxy, opts.UserAgent) if err != nil { if opts.FailOnNonTempDialError { return nil, connectionErrorf(isTemporary(err), err, "transport: error while dialing: %v", err) @@ -226,6 +244,18 @@ func newHTTP2Client(connectCtx, ctx context.Context, addr resolver.Address, opts if err != nil { return nil, connectionErrorf(isTemporary(err), err, "transport: authentication handshake failed: %v", err) } + for _, cd := range perRPCCreds { + if cd.RequireTransportSecurity() { + if ci, ok := authInfo.(interface { + GetCommonAuthInfo() credentials.CommonAuthInfo + }); ok { + secLevel := ci.GetCommonAuthInfo().SecurityLevel + if secLevel != credentials.InvalidSecurityLevel && secLevel < credentials.PrivacyAndIntegrity { + return nil, connectionErrorf(true, nil, "transport: cannot send secure credentials on an insecure connection") + } + } + } + } isSecure = true if transportCreds.Info().SecurityProtocol == "tls" { scheme = "https" @@ -248,7 +278,6 @@ func newHTTP2Client(connectCtx, ctx context.Context, addr resolver.Address, opts ctxDone: ctx.Done(), // Cache Done chan. cancel: cancel, userAgent: opts.UserAgent, - md: addr.Metadata, conn: conn, remoteAddr: conn.RemoteAddr(), localAddr: conn.LocalAddr(), @@ -276,6 +305,12 @@ func newHTTP2Client(connectCtx, ctx context.Context, addr resolver.Address, opts keepaliveEnabled: keepaliveEnabled, bufferPool: newBufferPool(), } + + if md, ok := addr.Metadata.(*metadata.MD); ok { + t.md = *md + } else if md := imetadata.Get(addr); md != nil { + t.md = md + } t.controlBuf = newControlBuffer(t.ctxDone) if opts.InitialWindowSize >= defaultWindowSize { t.initialWindowSize = opts.InitialWindowSize @@ -481,25 +516,23 @@ func (t *http2Client) createHeaderFields(ctx context.Context, callHdr *CallHdr) for _, vv := range added { for i, v := range vv { if i%2 == 0 { - k = v + k = strings.ToLower(v) continue } // HTTP doesn't allow you to set pseudoheaders after non pseudoheaders were set. 
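A minimal client-side sketch of the backward-compatibility behaviour handled in dial() above, assuming a hypothetical socket path: a custom dialer installed with WithContextDialer still receives the "unix://"-prefixed form of the target even though the unix resolver strips it to a bare path internally.

package main

import (
	"context"
	"net"
	"strings"

	"google.golang.org/grpc"
)

func main() {
	const target = "unix:///tmp/echo.sock" // hypothetical socket path

	conn, err := grpc.Dial(target,
		grpc.WithInsecure(),
		grpc.WithContextDialer(func(ctx context.Context, addr string) (net.Conn, error) {
			// addr arrives as "unix:///tmp/echo.sock", not the bare "/tmp/echo.sock".
			path := strings.TrimPrefix(addr, "unix://")
			return (&net.Dialer{}).DialContext(ctx, "unix", path)
		}),
	)
	if err != nil {
		panic(err)
	}
	defer conn.Close()
}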
if isReservedHeader(k) { continue } - headerFields = append(headerFields, hpack.HeaderField{Name: strings.ToLower(k), Value: encodeMetadataHeader(k, v)}) + headerFields = append(headerFields, hpack.HeaderField{Name: k, Value: encodeMetadataHeader(k, v)}) } } } - if md, ok := t.md.(*metadata.MD); ok { - for k, vv := range *md { - if isReservedHeader(k) { - continue - } - for _, v := range vv { - headerFields = append(headerFields, hpack.HeaderField{Name: k, Value: encodeMetadataHeader(k, v)}) - } + for k, vv := range t.md { + if isReservedHeader(k) { + continue + } + for _, v := range vv { + headerFields = append(headerFields, hpack.HeaderField{Name: k, Value: encodeMetadataHeader(k, v)}) } } return headerFields, nil @@ -549,8 +582,11 @@ func (t *http2Client) getCallAuthData(ctx context.Context, audience string, call // Note: if these credentials are provided both via dial options and call // options, then both sets of credentials will be applied. if callCreds := callHdr.Creds; callCreds != nil { - if !t.isSecure && callCreds.RequireTransportSecurity() { - return nil, status.Error(codes.Unauthenticated, "transport: cannot send secure credentials on an insecure connection") + if callCreds.RequireTransportSecurity() { + ri, _ := credentials.RequestInfoFromContext(ctx) + if !t.isSecure || credentials.CheckSecurityLevel(ri.AuthInfo, credentials.PrivacyAndIntegrity) != nil { + return nil, status.Error(codes.Unauthenticated, "transport: cannot send secure credentials on an insecure connection") + } } data, err := callCreds.GetRequestMetadata(ctx, audience) if err != nil { @@ -1206,8 +1242,8 @@ func (t *http2Client) operateHeaders(frame *http2.MetaHeadersFrame) { state := &decodeState{} // Initialize isGRPC value to be !initialHeader, since if a gRPC Response-Headers has already been received, then it means that the peer is speaking gRPC and we are in gRPC mode. 
state.data.isGRPC = !initialHeader - if err := state.decodeHeader(frame); err != nil { - t.closeStream(s, err, true, http2.ErrCodeProtocol, status.Convert(err), nil, endStream) + if h2code, err := state.decodeHeader(frame); err != nil { + t.closeStream(s, err, true, h2code, status.Convert(err), nil, endStream) return } @@ -1306,7 +1342,13 @@ func (t *http2Client) reader() { if s != nil { // use error detail to provide better err message code := http2ErrConvTab[se.Code] - msg := t.framer.fr.ErrorDetail().Error() + errorDetail := t.framer.fr.ErrorDetail() + var msg string + if errorDetail != nil { + msg = errorDetail.Error() + } else { + msg = "received invalid frame" + } t.closeStream(s, status.Error(code, msg), true, http2.ErrCodeProtocol, status.New(code, msg), nil, false) } continue diff --git a/vendor/google.golang.org/grpc/internal/transport/http2_server.go b/vendor/google.golang.org/grpc/internal/transport/http2_server.go index 04cbedf7945..0cf1cc320bb 100644 --- a/vendor/google.golang.org/grpc/internal/transport/http2_server.go +++ b/vendor/google.golang.org/grpc/internal/transport/http2_server.go @@ -306,12 +306,12 @@ func (t *http2Server) operateHeaders(frame *http2.MetaHeadersFrame, handle func( state := &decodeState{ serverSide: true, } - if err := state.decodeHeader(frame); err != nil { - if se, ok := status.FromError(err); ok { + if h2code, err := state.decodeHeader(frame); err != nil { + if _, ok := status.FromError(err); ok { t.controlBuf.put(&cleanupStream{ streamID: streamID, rst: true, - rstCode: statusCodeConvTab[se.Code()], + rstCode: h2code, onWrite: func() {}, }) } @@ -611,6 +611,10 @@ func (t *http2Server) handleData(f *http2.DataFrame) { if !ok { return } + if s.getState() == streamReadDone { + t.closeStream(s, true, http2.ErrCodeStreamClosed, false) + return + } if size > 0 { if err := s.fc.onData(size); err != nil { t.closeStream(s, true, http2.ErrCodeFlowControl, false) diff --git a/vendor/google.golang.org/grpc/internal/transport/http_util.go b/vendor/google.golang.org/grpc/internal/transport/http_util.go index 5e1e7a65da2..7e41d1183f9 100644 --- a/vendor/google.golang.org/grpc/internal/transport/http_util.go +++ b/vendor/google.golang.org/grpc/internal/transport/http_util.go @@ -27,6 +27,7 @@ import ( "math" "net" "net/http" + "net/url" "strconv" "strings" "time" @@ -73,13 +74,6 @@ var ( http2.ErrCodeInadequateSecurity: codes.PermissionDenied, http2.ErrCodeHTTP11Required: codes.Internal, } - statusCodeConvTab = map[codes.Code]http2.ErrCode{ - codes.Internal: http2.ErrCodeInternal, - codes.Canceled: http2.ErrCodeCancel, - codes.Unavailable: http2.ErrCodeRefusedStream, - codes.ResourceExhausted: http2.ErrCodeEnhanceYourCalm, - codes.PermissionDenied: http2.ErrCodeInadequateSecurity, - } // HTTPStatusConvTab is the HTTP status code to gRPC error code conversion table. HTTPStatusConvTab = map[int]codes.Code{ // 400 Bad Request - INTERNAL. @@ -222,11 +216,11 @@ func decodeMetadataHeader(k, v string) (string, error) { return v, nil } -func (d *decodeState) decodeHeader(frame *http2.MetaHeadersFrame) error { +func (d *decodeState) decodeHeader(frame *http2.MetaHeadersFrame) (http2.ErrCode, error) { // frame.Truncated is set to true when framer detects that the current header // list size hits MaxHeaderListSize limit. 
if frame.Truncated { - return status.Error(codes.Internal, "peer header list size exceeded limit") + return http2.ErrCodeFrameSize, status.Error(codes.Internal, "peer header list size exceeded limit") } for _, hf := range frame.Fields { @@ -235,10 +229,10 @@ func (d *decodeState) decodeHeader(frame *http2.MetaHeadersFrame) error { if d.data.isGRPC { if d.data.grpcErr != nil { - return d.data.grpcErr + return http2.ErrCodeProtocol, d.data.grpcErr } if d.serverSide { - return nil + return http2.ErrCodeNo, nil } if d.data.rawStatusCode == nil && d.data.statusGen == nil { // gRPC status doesn't exist. @@ -250,12 +244,12 @@ func (d *decodeState) decodeHeader(frame *http2.MetaHeadersFrame) error { code := int(codes.Unknown) d.data.rawStatusCode = &code } - return nil + return http2.ErrCodeNo, nil } // HTTP fallback mode if d.data.httpErr != nil { - return d.data.httpErr + return http2.ErrCodeProtocol, d.data.httpErr } var ( @@ -270,7 +264,7 @@ func (d *decodeState) decodeHeader(frame *http2.MetaHeadersFrame) error { } } - return status.Error(code, d.constructHTTPErrMsg()) + return http2.ErrCodeProtocol, status.Error(code, d.constructHTTPErrMsg()) } // constructErrMsg constructs error message to be returned in HTTP fallback mode. @@ -605,3 +599,31 @@ func newFramer(conn net.Conn, writeBufferSize, readBufferSize int, maxHeaderList f.fr.ReadMetaHeaders = hpack.NewDecoder(http2InitHeaderTableSize, nil) return f } + +// parseDialTarget returns the network and address to pass to dialer. +func parseDialTarget(target string) (string, string) { + net := "tcp" + m1 := strings.Index(target, ":") + m2 := strings.Index(target, ":/") + // handle unix:addr which will fail with url.Parse + if m1 >= 0 && m2 < 0 { + if n := target[0:m1]; n == "unix" { + return n, target[m1+1:] + } + } + if m2 >= 0 { + t, err := url.Parse(target) + if err != nil { + return net, target + } + scheme := t.Scheme + addr := t.Path + if scheme == "unix" { + if addr == "" { + addr = t.Host + } + return scheme, addr + } + } + return net, target +} diff --git a/vendor/google.golang.org/grpc/internal/transport/networktype/networktype.go b/vendor/google.golang.org/grpc/internal/transport/networktype/networktype.go new file mode 100644 index 00000000000..96967428b51 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/networktype/networktype.go @@ -0,0 +1,46 @@ +/* + * + * Copyright 2020 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package networktype declares the network type to be used in the default +// dailer. Attribute of a resolver.Address. +package networktype + +import ( + "google.golang.org/grpc/resolver" +) + +// keyType is the key to use for storing State in Attributes. +type keyType string + +const key = keyType("grpc.internal.transport.networktype") + +// Set returns a copy of the provided address with attributes containing networkType. 
+func Set(address resolver.Address, networkType string) resolver.Address { + address.Attributes = address.Attributes.WithValues(key, networkType) + return address +} + +// Get returns the network type in the resolver.Address and true, or "", false +// if not present. +func Get(address resolver.Address) (string, bool) { + v := address.Attributes.Value(key) + if v == nil { + return "", false + } + return v.(string), true +} diff --git a/vendor/google.golang.org/grpc/proxy.go b/vendor/google.golang.org/grpc/internal/transport/proxy.go similarity index 73% rename from vendor/google.golang.org/grpc/proxy.go rename to vendor/google.golang.org/grpc/internal/transport/proxy.go index f8f69bfb70f..a662bf39a6c 100644 --- a/vendor/google.golang.org/grpc/proxy.go +++ b/vendor/google.golang.org/grpc/internal/transport/proxy.go @@ -16,13 +16,12 @@ * */ -package grpc +package transport import ( "bufio" "context" "encoding/base64" - "errors" "fmt" "io" "net" @@ -34,8 +33,6 @@ import ( const proxyAuthHeaderKey = "Proxy-Authorization" var ( - // errDisabled indicates that proxy is disabled for the address. - errDisabled = errors.New("proxy is disabled for the address") // The following variable will be overwritten in the tests. httpProxyFromEnvironment = http.ProxyFromEnvironment ) @@ -51,9 +48,6 @@ func mapAddress(ctx context.Context, address string) (*url.URL, error) { if err != nil { return nil, err } - if url == nil { - return nil, errDisabled - } return url, nil } @@ -76,7 +70,7 @@ func basicAuth(username, password string) string { return base64.StdEncoding.EncodeToString([]byte(auth)) } -func doHTTPConnectHandshake(ctx context.Context, conn net.Conn, backendAddr string, proxyURL *url.URL) (_ net.Conn, err error) { +func doHTTPConnectHandshake(ctx context.Context, conn net.Conn, backendAddr string, proxyURL *url.URL, grpcUA string) (_ net.Conn, err error) { defer func() { if err != nil { conn.Close() @@ -115,32 +109,28 @@ func doHTTPConnectHandshake(ctx context.Context, conn net.Conn, backendAddr stri return &bufConn{Conn: conn, r: r}, nil } -// newProxyDialer returns a dialer that connects to proxy first if necessary. -// The returned dialer checks if a proxy is necessary, dial to the proxy with the -// provided dialer, does HTTP CONNECT handshake and returns the connection. -func newProxyDialer(dialer func(context.Context, string) (net.Conn, error)) func(context.Context, string) (net.Conn, error) { - return func(ctx context.Context, addr string) (conn net.Conn, err error) { - var newAddr string - proxyURL, err := mapAddress(ctx, addr) - if err != nil { - if err != errDisabled { - return nil, err - } - newAddr = addr - } else { - newAddr = proxyURL.Host - } +// proxyDial dials, connecting to a proxy first if necessary. Checks if a proxy +// is necessary, dials, does the HTTP CONNECT handshake, and returns the +// connection. +func proxyDial(ctx context.Context, addr string, grpcUA string) (conn net.Conn, err error) { + newAddr := addr + proxyURL, err := mapAddress(ctx, addr) + if err != nil { + return nil, err + } + if proxyURL != nil { + newAddr = proxyURL.Host + } - conn, err = dialer(ctx, newAddr) - if err != nil { - return - } - if proxyURL != nil { - // proxy is disabled if proxyURL is nil. - conn, err = doHTTPConnectHandshake(ctx, conn, addr, proxyURL) - } + conn, err = (&net.Dialer{}).DialContext(ctx, "tcp", newAddr) + if err != nil { return } + if proxyURL != nil { + // proxy is disabled if proxyURL is nil. 
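A small sketch, using a made-up target, of how a client opts out of the proxy path implemented by proxyDial above. proxyDial consults http.ProxyFromEnvironment (HTTP_PROXY/HTTPS_PROXY/NO_PROXY); grpc.WithNoProxy disables that path for the connection.

package main

import "google.golang.org/grpc"

func main() {
	const target = "dns:///api.example.internal:443" // hypothetical backend

	conn, err := grpc.Dial(target,
		grpc.WithInsecure(),
		grpc.WithNoProxy(), // skip proxy resolution and the CONNECT handshake
	)
	if err != nil {
		panic(err)
	}
	defer conn.Close()
}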
+ conn, err = doHTTPConnectHandshake(ctx, conn, addr, proxyURL, grpcUA) + } + return } func sendHTTPRequest(ctx context.Context, req *http.Request, conn net.Conn) error { diff --git a/vendor/google.golang.org/grpc/internal/transport/transport.go b/vendor/google.golang.org/grpc/internal/transport/transport.go index b74030a9687..9c8f79cb4b2 100644 --- a/vendor/google.golang.org/grpc/internal/transport/transport.go +++ b/vendor/google.golang.org/grpc/internal/transport/transport.go @@ -569,6 +569,8 @@ type ConnectOptions struct { ChannelzParentID int64 // MaxHeaderListSize sets the max (uncompressed) size of header list that is prepared to be received. MaxHeaderListSize *uint32 + // UseProxy specifies if a proxy should be used. + UseProxy bool } // NewClientTransport establishes the transport with the required ConnectOptions diff --git a/vendor/google.golang.org/grpc/preloader.go b/vendor/google.golang.org/grpc/preloader.go index 76acbbcc93b..0a1e975ad91 100644 --- a/vendor/google.golang.org/grpc/preloader.go +++ b/vendor/google.golang.org/grpc/preloader.go @@ -25,7 +25,10 @@ import ( // PreparedMsg is responsible for creating a Marshalled and Compressed object. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type PreparedMsg struct { // Struct for preparing msg before sending them encodedData []byte diff --git a/vendor/google.golang.org/grpc/regenerate.sh b/vendor/google.golang.org/grpc/regenerate.sh index 987bc20251d..fc6725b89f8 100644 --- a/vendor/google.golang.org/grpc/regenerate.sh +++ b/vendor/google.golang.org/grpc/regenerate.sh @@ -26,8 +26,13 @@ export GOBIN=${WORKDIR}/bin export PATH=${GOBIN}:${PATH} mkdir -p ${GOBIN} -echo "go install github.com/golang/protobuf/protoc-gen-go" -(cd test/tools && go install github.com/golang/protobuf/protoc-gen-go) +echo "remove existing generated files" +# grpc_testingv3/testv3.pb.go is not re-generated because it was +# intentionally generated by an older version of protoc-gen-go. +rm -f $(find . -name '*.pb.go' | grep -v 'grpc_testingv3/testv3.pb.go') + +echo "go install google.golang.org/protobuf/cmd/protoc-gen-go" +(cd test/tools && go install google.golang.org/protobuf/cmd/protoc-gen-go) echo "go install cmd/protoc-gen-go-grpc" (cd cmd/protoc-gen-go-grpc && go install .) @@ -35,32 +40,69 @@ echo "go install cmd/protoc-gen-go-grpc" echo "git clone https://github.com/grpc/grpc-proto" git clone --quiet https://github.com/grpc/grpc-proto ${WORKDIR}/grpc-proto +echo "git clone https://github.com/protocolbuffers/protobuf" +git clone --quiet https://github.com/protocolbuffers/protobuf ${WORKDIR}/protobuf + +# Pull in code.proto as a proto dependency mkdir -p ${WORKDIR}/googleapis/google/rpc echo "curl https://raw.githubusercontent.com/googleapis/googleapis/master/google/rpc/code.proto" curl --silent https://raw.githubusercontent.com/googleapis/googleapis/master/google/rpc/code.proto > ${WORKDIR}/googleapis/google/rpc/code.proto +# Pull in the MeshCA service proto. 
+mkdir -p ${WORKDIR}/istio/istio/google/security/meshca/v1 +echo "curl https://raw.githubusercontent.com/istio/istio/master/security/proto/providers/google/meshca.proto" +curl --silent https://raw.githubusercontent.com/istio/istio/master/security/proto/providers/google/meshca.proto > ${WORKDIR}/istio/istio/google/security/meshca/v1/meshca.proto + mkdir -p ${WORKDIR}/out -SOURCES=( - ${WORKDIR}/googleapis/google/rpc/code.proto +# Generates sources without the embed requirement +LEGACY_SOURCES=( ${WORKDIR}/grpc-proto/grpc/binlog/v1/binarylog.proto ${WORKDIR}/grpc-proto/grpc/channelz/v1/channelz.proto + ${WORKDIR}/grpc-proto/grpc/health/v1/health.proto + ${WORKDIR}/grpc-proto/grpc/lb/v1/load_balancer.proto + profiling/proto/service.proto + reflection/grpc_reflection_v1alpha/reflection.proto +) + +# Generates only the new gRPC Service symbols +SOURCES=( + $(git ls-files --exclude-standard --cached --others "*.proto" | grep -v '^\(profiling/proto/service.proto\|reflection/grpc_reflection_v1alpha/reflection.proto\)$') ${WORKDIR}/grpc-proto/grpc/gcp/altscontext.proto ${WORKDIR}/grpc-proto/grpc/gcp/handshaker.proto ${WORKDIR}/grpc-proto/grpc/gcp/transport_security_common.proto - ${WORKDIR}/grpc-proto/grpc/health/v1/health.proto - ${WORKDIR}/grpc-proto/grpc/lb/v1/load_balancer.proto ${WORKDIR}/grpc-proto/grpc/lookup/v1/rls.proto + ${WORKDIR}/grpc-proto/grpc/lookup/v1/rls_config.proto ${WORKDIR}/grpc-proto/grpc/service_config/service_config.proto - $(git ls-files --exclude-standard --cached --others "*.proto") + ${WORKDIR}/grpc-proto/grpc/testing/*.proto + ${WORKDIR}/grpc-proto/grpc/core/*.proto + ${WORKDIR}/istio/istio/google/security/meshca/v1/meshca.proto ) -OPTS=Mgrpc/service_config/service_config.proto=/internal/proto/grpc_service_config + +# These options of the form 'Mfoo.proto=bar' instruct the codegen to use an +# import path of 'bar' in the generated code when 'foo.proto' is imported in +# one of the sources. +OPTS=Mgrpc/service_config/service_config.proto=/internal/proto/grpc_service_config,Mgrpc/core/stats.proto=google.golang.org/grpc/interop/grpc_testing/core + for src in ${SOURCES[@]}; do echo "protoc ${src}" - protoc --go_out=${OPTS}:${WORKDIR}/out --go-grpc_out=${OPTS},requireUnimplementedServers=false:${WORKDIR}/out \ + protoc --go_out=${OPTS}:${WORKDIR}/out --go-grpc_out=${OPTS}:${WORKDIR}/out \ + -I"." \ + -I${WORKDIR}/grpc-proto \ + -I${WORKDIR}/googleapis \ + -I${WORKDIR}/protobuf/src \ + -I${WORKDIR}/istio \ + ${src} +done + +for src in ${LEGACY_SOURCES[@]}; do + echo "protoc ${src}" + protoc --go_out=${OPTS}:${WORKDIR}/out --go-grpc_out=${OPTS},require_unimplemented_servers=false:${WORKDIR}/out \ -I"." \ -I${WORKDIR}/grpc-proto \ -I${WORKDIR}/googleapis \ + -I${WORKDIR}/protobuf/src \ + -I${WORKDIR}/istio \ ${src} done @@ -74,6 +116,14 @@ mv ${WORKDIR}/out/google.golang.org/grpc/lookup/grpc_lookup_v1/* ${WORKDIR}/out/ rm ${WORKDIR}/out/google.golang.org/grpc/reflection/grpc_testingv3/*.pb.go # grpc/service_config/service_config.proto does not have a go_package option. -cp ${WORKDIR}/out/grpc/service_config/service_config.pb.go internal/proto/grpc_service_config +mv ${WORKDIR}/out/grpc/service_config/service_config.pb.go internal/proto/grpc_service_config + +# grpc/testing does not have a go_package option. +mv ${WORKDIR}/out/grpc/testing/*.pb.go interop/grpc_testing/ +mv ${WORKDIR}/out/grpc/core/*.pb.go interop/grpc_testing/core/ + +# istio/google/security/meshca/v1/meshca.proto does not have a go_package option. 
+mkdir -p ${WORKDIR}/out/google.golang.org/grpc/credentials/tls/certprovider/meshca/internal/v1/ +mv ${WORKDIR}/out/istio/google/security/meshca/v1/* ${WORKDIR}/out/google.golang.org/grpc/credentials/tls/certprovider/meshca/internal/v1/ cp -R ${WORKDIR}/out/google.golang.org/grpc/* . diff --git a/vendor/google.golang.org/grpc/resolver/resolver.go b/vendor/google.golang.org/grpc/resolver/resolver.go index 379275a2d9b..e9fa8e33d92 100644 --- a/vendor/google.golang.org/grpc/resolver/resolver.go +++ b/vendor/google.golang.org/grpc/resolver/resolver.go @@ -93,7 +93,11 @@ const ( ) // Address represents a server the client connects to. -// This is the EXPERIMENTAL API and may be changed or extended in the future. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type Address struct { // Addr is the server address on which a connection will be established. Addr string diff --git a/vendor/google.golang.org/grpc/resolver_conn_wrapper.go b/vendor/google.golang.org/grpc/resolver_conn_wrapper.go index 265002a75e0..f2d81968f9e 100644 --- a/vendor/google.golang.org/grpc/resolver_conn_wrapper.go +++ b/vendor/google.golang.org/grpc/resolver_conn_wrapper.go @@ -217,6 +217,6 @@ func (ccr *ccResolverWrapper) addChannelzTraceEvent(s resolver.State) { } channelz.AddTraceEvent(logger, ccr.cc.channelzID, 0, &channelz.TraceEventDesc{ Desc: fmt.Sprintf("Resolver state updated: %+v (%v)", s, strings.Join(updates, "; ")), - Severity: channelz.CtINFO, + Severity: channelz.CtInfo, }) } diff --git a/vendor/google.golang.org/grpc/rpc_util.go b/vendor/google.golang.org/grpc/rpc_util.go index 8644b8a7d0d..c0a1208f2f3 100644 --- a/vendor/google.golang.org/grpc/rpc_util.go +++ b/vendor/google.golang.org/grpc/rpc_util.go @@ -27,7 +27,6 @@ import ( "io" "io/ioutil" "math" - "net/url" "strings" "sync" "time" @@ -198,7 +197,11 @@ func Header(md *metadata.MD) CallOption { // HeaderCallOption is a CallOption for collecting response header metadata. // The metadata field will be populated *after* the RPC completes. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type HeaderCallOption struct { HeaderAddr *metadata.MD } @@ -216,7 +219,11 @@ func Trailer(md *metadata.MD) CallOption { // TrailerCallOption is a CallOption for collecting response trailer metadata. // The metadata field will be populated *after* the RPC completes. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type TrailerCallOption struct { TrailerAddr *metadata.MD } @@ -234,7 +241,11 @@ func Peer(p *peer.Peer) CallOption { // PeerCallOption is a CallOption for collecting the identity of the remote // peer. The peer field will be populated *after* the RPC completes. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type PeerCallOption struct { PeerAddr *peer.Peer } @@ -269,7 +280,11 @@ func FailFast(failFast bool) CallOption { // FailFastCallOption is a CallOption for indicating whether an RPC should fail // fast or not. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. 
type FailFastCallOption struct { FailFast bool } @@ -288,7 +303,11 @@ func MaxCallRecvMsgSize(bytes int) CallOption { // MaxRecvMsgSizeCallOption is a CallOption that indicates the maximum message // size in bytes the client can receive. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type MaxRecvMsgSizeCallOption struct { MaxRecvMsgSize int } @@ -307,7 +326,11 @@ func MaxCallSendMsgSize(bytes int) CallOption { // MaxSendMsgSizeCallOption is a CallOption that indicates the maximum message // size in bytes the client can send. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type MaxSendMsgSizeCallOption struct { MaxSendMsgSize int } @@ -326,7 +349,11 @@ func PerRPCCredentials(creds credentials.PerRPCCredentials) CallOption { // PerRPCCredsCallOption is a CallOption that indicates the per-RPC // credentials to use for the call. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type PerRPCCredsCallOption struct { Creds credentials.PerRPCCredentials } @@ -341,13 +368,20 @@ func (o PerRPCCredsCallOption) after(c *callInfo, attempt *csAttempt) {} // sending the request. If WithCompressor is also set, UseCompressor has // higher priority. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func UseCompressor(name string) CallOption { return CompressorCallOption{CompressorType: name} } // CompressorCallOption is a CallOption that indicates the compressor to use. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type CompressorCallOption struct { CompressorType string } @@ -380,7 +414,11 @@ func CallContentSubtype(contentSubtype string) CallOption { // ContentSubtypeCallOption is a CallOption that indicates the content-subtype // used for marshaling messages. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type ContentSubtypeCallOption struct { ContentSubtype string } @@ -404,7 +442,10 @@ func (o ContentSubtypeCallOption) after(c *callInfo, attempt *csAttempt) {} // This function is provided for advanced users; prefer to use only // CallContentSubtype to select a registered codec instead. // -// This is an EXPERIMENTAL API. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func ForceCodec(codec encoding.Codec) CallOption { return ForceCodecCallOption{Codec: codec} } @@ -412,7 +453,10 @@ func ForceCodec(codec encoding.Codec) CallOption { // ForceCodecCallOption is a CallOption that indicates the codec used for // marshaling messages. // -// This is an EXPERIMENTAL API. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type ForceCodecCallOption struct { Codec encoding.Codec } @@ -434,7 +478,10 @@ func CallCustomCodec(codec Codec) CallOption { // CustomCodecCallOption is a CallOption that indicates the codec used for // marshaling messages. // -// This is an EXPERIMENTAL API. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. 
type CustomCodecCallOption struct { Codec Codec } @@ -448,14 +495,21 @@ func (o CustomCodecCallOption) after(c *callInfo, attempt *csAttempt) {} // MaxRetryRPCBufferSize returns a CallOption that limits the amount of memory // used for buffering this RPC's requests for retry purposes. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func MaxRetryRPCBufferSize(bytes int) CallOption { return MaxRetryRPCBufferSizeCallOption{bytes} } // MaxRetryRPCBufferSizeCallOption is a CallOption indicating the amount of // memory to be used for caching this RPC for retry purposes. -// This is an EXPERIMENTAL API. +// +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type MaxRetryRPCBufferSizeCallOption struct { MaxRetryRPCBufferSize int } @@ -817,40 +871,6 @@ func setCallInfoCodec(c *callInfo) error { return nil } -// parseDialTarget returns the network and address to pass to dialer -func parseDialTarget(target string) (net string, addr string) { - net = "tcp" - - m1 := strings.Index(target, ":") - m2 := strings.Index(target, ":/") - - // handle unix:addr which will fail with url.Parse - if m1 >= 0 && m2 < 0 { - if n := target[0:m1]; n == "unix" { - net = n - addr = target[m1+1:] - return net, addr - } - } - if m2 >= 0 { - t, err := url.Parse(target) - if err != nil { - return net, target - } - scheme := t.Scheme - addr = t.Path - if scheme == "unix" { - net = scheme - if addr == "" { - addr = t.Host - } - return net, addr - } - } - - return net, target -} - // channelzData is used to store channelz related data for ClientConn, addrConn and Server. // These fields cannot be embedded in the original structs (e.g. ClientConn), since to do atomic // operation on int64 variable on 32-bit machine, user is responsible to enforce memory alignment. @@ -866,10 +886,9 @@ type channelzData struct { // The SupportPackageIsVersion variables are referenced from generated protocol // buffer files to ensure compatibility with the gRPC version used. The latest -// support package version is 6. +// support package version is 7. // -// Older versions are kept for compatibility. They may be removed if -// compatibility cannot be maintained. +// Older versions are kept for compatibility. // // These constants should not be referenced from any other code. 
const ( @@ -877,6 +896,7 @@ const ( SupportPackageIsVersion4 = true SupportPackageIsVersion5 = true SupportPackageIsVersion6 = true + SupportPackageIsVersion7 = true ) const grpcUA = "grpc-go/" + Version diff --git a/vendor/google.golang.org/grpc/server.go b/vendor/google.golang.org/grpc/server.go index 1b56cc2d11f..7a2aa28a114 100644 --- a/vendor/google.golang.org/grpc/server.go +++ b/vendor/google.golang.org/grpc/server.go @@ -40,6 +40,7 @@ import ( "google.golang.org/grpc/encoding" "google.golang.org/grpc/encoding/proto" "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal" "google.golang.org/grpc/internal/binarylog" "google.golang.org/grpc/internal/channelz" "google.golang.org/grpc/internal/grpcrand" @@ -58,6 +59,12 @@ const ( defaultServerMaxSendMessageSize = math.MaxInt32 ) +func init() { + internal.GetServerCredentials = func(srv *Server) credentials.TransportCredentials { + return srv.opts.creds + } +} + var statusOK = status.New(codes.OK, "") var logger = grpclog.Component("core") @@ -80,13 +87,14 @@ type ServiceDesc struct { Metadata interface{} } -// service consists of the information of the server serving this service and -// the methods in this service. -type service struct { - server interface{} // the server for service methods - md map[string]*MethodDesc - sd map[string]*StreamDesc - mdata interface{} +// serviceInfo wraps information about a service. It is very similar to +// ServiceDesc and is constructed from it for internal purposes. +type serviceInfo struct { + // Contains the implementation for the methods in this service. + serviceImpl interface{} + methods map[string]*MethodDesc + streams map[string]*StreamDesc + mdata interface{} } type serverWorkerData struct { @@ -99,14 +107,14 @@ type serverWorkerData struct { type Server struct { opts serverOptions - mu sync.Mutex // guards following - lis map[net.Listener]bool - conns map[transport.ServerTransport]bool - serve bool - drain bool - cv *sync.Cond // signaled when connections close for GracefulStop - m map[string]*service // service name -> service info - events trace.EventLog + mu sync.Mutex // guards following + lis map[net.Listener]bool + conns map[transport.ServerTransport]bool + serve bool + drain bool + cv *sync.Cond // signaled when connections close for GracefulStop + services map[string]*serviceInfo // service name -> service info + events trace.EventLog quit *grpcsync.Event done *grpcsync.Event @@ -162,7 +170,10 @@ type ServerOption interface { // EmptyServerOption does not alter the server configuration. It can be embedded // in another structure to build custom server options. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type EmptyServerOption struct{} func (EmptyServerOption) apply(*serverOptions) {} @@ -404,7 +415,10 @@ func UnknownServiceHandler(streamHandler StreamHandler) ServerOption { // new connections. If this is not set, the default is 120 seconds. A zero or // negative value will result in an immediate timeout. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func ConnectionTimeout(d time.Duration) ServerOption { return newFuncServerOption(func(o *serverOptions) { o.connectionTimeout = d @@ -422,7 +436,10 @@ func MaxHeaderListSize(s uint32) ServerOption { // HeaderTableSize returns a ServerOption that sets the size of dynamic // header table for stream. 
// -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func HeaderTableSize(s uint32) ServerOption { return newFuncServerOption(func(o *serverOptions) { o.headerTableSize = &s @@ -434,7 +451,10 @@ func HeaderTableSize(s uint32) ServerOption { // zero (default) will disable workers and spawn a new goroutine for each // stream. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func NumStreamWorkers(numServerWorkers uint32) ServerOption { // TODO: If/when this API gets stabilized (i.e. stream workers become the // only way streams are processed), change the behavior of the zero value to @@ -497,13 +517,13 @@ func NewServer(opt ...ServerOption) *Server { o.apply(&opts) } s := &Server{ - lis: make(map[net.Listener]bool), - opts: opts, - conns: make(map[transport.ServerTransport]bool), - m: make(map[string]*service), - quit: grpcsync.NewEvent(), - done: grpcsync.NewEvent(), - czData: new(channelzData), + lis: make(map[net.Listener]bool), + opts: opts, + conns: make(map[transport.ServerTransport]bool), + services: make(map[string]*serviceInfo), + quit: grpcsync.NewEvent(), + done: grpcsync.NewEvent(), + czData: new(channelzData), } chainUnaryServerInterceptors(s) chainStreamServerInterceptors(s) @@ -539,14 +559,29 @@ func (s *Server) errorf(format string, a ...interface{}) { } } +// ServiceRegistrar wraps a single method that supports service registration. It +// enables users to pass concrete types other than grpc.Server to the service +// registration methods exported by the IDL generated code. +type ServiceRegistrar interface { + // RegisterService registers a service and its implementation to the + // concrete type implementing this interface. It may not be called + // once the server has started serving. + // desc describes the service and its methods and handlers. impl is the + // service implementation which is passed to the method handlers. + RegisterService(desc *ServiceDesc, impl interface{}) +} + // RegisterService registers a service and its implementation to the gRPC // server. It is called from the IDL generated code. This must be called before -// invoking Serve. +// invoking Serve. If ss is non-nil (for legacy code), its type is checked to +// ensure it implements sd.HandlerType. 
func (s *Server) RegisterService(sd *ServiceDesc, ss interface{}) { - ht := reflect.TypeOf(sd.HandlerType).Elem() - st := reflect.TypeOf(ss) - if !st.Implements(ht) { - logger.Fatalf("grpc: Server.RegisterService found the handler of type %v that does not satisfy %v", st, ht) + if ss != nil { + ht := reflect.TypeOf(sd.HandlerType).Elem() + st := reflect.TypeOf(ss) + if !st.Implements(ht) { + logger.Fatalf("grpc: Server.RegisterService found the handler of type %v that does not satisfy %v", st, ht) + } } s.register(sd, ss) } @@ -558,24 +593,24 @@ func (s *Server) register(sd *ServiceDesc, ss interface{}) { if s.serve { logger.Fatalf("grpc: Server.RegisterService after Server.Serve for %q", sd.ServiceName) } - if _, ok := s.m[sd.ServiceName]; ok { + if _, ok := s.services[sd.ServiceName]; ok { logger.Fatalf("grpc: Server.RegisterService found duplicate service registration for %q", sd.ServiceName) } - srv := &service{ - server: ss, - md: make(map[string]*MethodDesc), - sd: make(map[string]*StreamDesc), - mdata: sd.Metadata, + info := &serviceInfo{ + serviceImpl: ss, + methods: make(map[string]*MethodDesc), + streams: make(map[string]*StreamDesc), + mdata: sd.Metadata, } for i := range sd.Methods { d := &sd.Methods[i] - srv.md[d.MethodName] = d + info.methods[d.MethodName] = d } for i := range sd.Streams { d := &sd.Streams[i] - srv.sd[d.StreamName] = d + info.streams[d.StreamName] = d } - s.m[sd.ServiceName] = srv + s.services[sd.ServiceName] = info } // MethodInfo contains the information of an RPC including its method name and type. @@ -599,16 +634,16 @@ type ServiceInfo struct { // Service names include the package names, in the form of .. func (s *Server) GetServiceInfo() map[string]ServiceInfo { ret := make(map[string]ServiceInfo) - for n, srv := range s.m { - methods := make([]MethodInfo, 0, len(srv.md)+len(srv.sd)) - for m := range srv.md { + for n, srv := range s.services { + methods := make([]MethodInfo, 0, len(srv.methods)+len(srv.streams)) + for m := range srv.methods { methods = append(methods, MethodInfo{ Name: m, IsClientStream: false, IsServerStream: false, }) } - for m, d := range srv.sd { + for m, d := range srv.streams { methods = append(methods, MethodInfo{ Name: m, IsClientStream: d.ClientStreams, @@ -877,8 +912,12 @@ var _ http.Handler = (*Server)(nil) // Note that ServeHTTP uses Go's HTTP/2 server implementation which is totally // separate from grpc-go's HTTP/2 server. Performance and features may vary // between the two paths. ServeHTTP does not support some gRPC features -// available through grpc-go's HTTP/2 server, and it is currently EXPERIMENTAL -// and subject to change. +// available through grpc-go's HTTP/2 server. +// +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. 
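A minimal sketch of registering a service through the new ServiceRegistrar interface; the echoService type and the descriptor contents are hypothetical stand-ins for what protoc-generated code normally supplies.

package main

import (
	"net"

	"google.golang.org/grpc"
)

type echoService struct{} // hypothetical implementation

// registerEcho mirrors what a generated RegisterXxxServer helper does: it only
// needs grpc.ServiceRegistrar, not *grpc.Server, so other registrar
// implementations can be passed in without code changes.
func registerEcho(r grpc.ServiceRegistrar, srv *echoService) {
	r.RegisterService(&grpc.ServiceDesc{
		ServiceName: "example.Echo",      // hypothetical service name
		HandlerType: (*interface{})(nil), // placeholder; generated code supplies the real interface type
		Metadata:    "echo.proto",
	}, srv)
}

func main() {
	s := grpc.NewServer()
	registerEcho(s, &echoService{}) // *grpc.Server satisfies grpc.ServiceRegistrar
	lis, err := net.Listen("tcp", "localhost:0")
	if err != nil {
		panic(err)
	}
	_ = s.Serve(lis)
}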
func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { st, err := transport.NewServerHandlerTransport(w, r, s.opts.statsHandler) if err != nil { @@ -1020,7 +1059,7 @@ func getChainUnaryHandler(interceptors []UnaryServerInterceptor, curr int, info } } -func (s *Server) processUnaryRPC(t transport.ServerTransport, stream *transport.Stream, srv *service, md *MethodDesc, trInfo *traceInfo) (err error) { +func (s *Server) processUnaryRPC(t transport.ServerTransport, stream *transport.Stream, info *serviceInfo, md *MethodDesc, trInfo *traceInfo) (err error) { sh := s.opts.statsHandler if sh != nil || trInfo != nil || channelz.IsOn() { if channelz.IsOn() { @@ -1143,10 +1182,8 @@ func (s *Server) processUnaryRPC(t transport.ServerTransport, stream *transport. } d, err := recvAndDecompress(&parser{r: stream}, stream, dc, s.opts.maxReceiveMessageSize, payInfo, decomp) if err != nil { - if st, ok := status.FromError(err); ok { - if e := t.WriteStatus(stream, st); e != nil { - channelz.Warningf(logger, s.channelzID, "grpc: Server.processUnaryRPC failed to write status %v", e) - } + if e := t.WriteStatus(stream, status.Convert(err)); e != nil { + channelz.Warningf(logger, s.channelzID, "grpc: Server.processUnaryRPC failed to write status %v", e) } return err } @@ -1161,7 +1198,7 @@ func (s *Server) processUnaryRPC(t transport.ServerTransport, stream *transport. sh.HandleRPC(stream.Context(), &stats.InPayload{ RecvTime: time.Now(), Payload: v, - WireLength: payInfo.wireLength, + WireLength: payInfo.wireLength + headerLen, Data: d, Length: len(d), }) @@ -1177,7 +1214,7 @@ func (s *Server) processUnaryRPC(t transport.ServerTransport, stream *transport. return nil } ctx := NewContextWithServerTransportStream(stream.Context(), stream) - reply, appErr := md.Handler(srv.server, ctx, df, s.opts.unaryInt) + reply, appErr := md.Handler(info.serviceImpl, ctx, df, s.opts.unaryInt) if appErr != nil { appStatus, ok := status.FromError(appErr) if !ok { @@ -1303,7 +1340,7 @@ func getChainStreamHandler(interceptors []StreamServerInterceptor, curr int, inf } } -func (s *Server) processStreamingRPC(t transport.ServerTransport, stream *transport.Stream, srv *service, sd *StreamDesc, trInfo *traceInfo) (err error) { +func (s *Server) processStreamingRPC(t transport.ServerTransport, stream *transport.Stream, info *serviceInfo, sd *StreamDesc, trInfo *traceInfo) (err error) { if channelz.IsOn() { s.incrCallsStarted() } @@ -1420,8 +1457,8 @@ func (s *Server) processStreamingRPC(t transport.ServerTransport, stream *transp } var appErr error var server interface{} - if srv != nil { - server = srv.server + if info != nil { + server = info.serviceImpl } if s.opts.streamInt == nil { appErr = sd.Handler(server, ss) @@ -1497,13 +1534,13 @@ func (s *Server) handleStream(t transport.ServerTransport, stream *transport.Str service := sm[:pos] method := sm[pos+1:] - srv, knownService := s.m[service] + srv, knownService := s.services[service] if knownService { - if md, ok := srv.md[method]; ok { + if md, ok := srv.methods[method]; ok { s.processUnaryRPC(t, stream, srv, md, trInfo) return } - if sd, ok := srv.sd[method]; ok { + if sd, ok := srv.streams[method]; ok { s.processStreamingRPC(t, stream, srv, sd, trInfo) return } @@ -1541,7 +1578,10 @@ type streamKey struct{} // NewContextWithServerTransportStream creates a new context from ctx and // attaches stream to it. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. 
func NewContextWithServerTransportStream(ctx context.Context, stream ServerTransportStream) context.Context { return context.WithValue(ctx, streamKey{}, stream) } @@ -1553,7 +1593,10 @@ func NewContextWithServerTransportStream(ctx context.Context, stream ServerTrans // // See also NewContextWithServerTransportStream. // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This type is EXPERIMENTAL and may be changed or removed in a +// later release. type ServerTransportStream interface { Method() string SetHeader(md metadata.MD) error @@ -1565,7 +1608,10 @@ type ServerTransportStream interface { // ctx. Returns nil if the given context has no stream associated with it // (which implies it is not an RPC invocation context). // -// This API is EXPERIMENTAL. +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. func ServerTransportStreamFromContext(ctx context.Context) ServerTransportStream { s, _ := ctx.Value(streamKey{}).(ServerTransportStream) return s diff --git a/vendor/google.golang.org/grpc/service_config.go b/vendor/google.golang.org/grpc/service_config.go index 5e434ca7f35..22c4240cf7e 100644 --- a/vendor/google.golang.org/grpc/service_config.go +++ b/vendor/google.golang.org/grpc/service_config.go @@ -41,29 +41,7 @@ const maxInt = int(^uint(0) >> 1) // Deprecated: Users should not use this struct. Service config should be received // through name resolver, as specified here // https://github.com/grpc/grpc/blob/master/doc/service_config.md -type MethodConfig struct { - // WaitForReady indicates whether RPCs sent to this method should wait until - // the connection is ready by default (!failfast). The value specified via the - // gRPC client API will override the value set here. - WaitForReady *bool - // Timeout is the default timeout for RPCs sent to this method. The actual - // deadline used will be the minimum of the value specified here and the value - // set by the application via the gRPC client API. If either one is not set, - // then the other will be used. If neither is set, then the RPC has no deadline. - Timeout *time.Duration - // MaxReqSize is the maximum allowed payload size for an individual request in a - // stream (client->server) in bytes. The size which is measured is the serialized - // payload after per-message compression (but before stream compression) in bytes. - // The actual value used is the minimum of the value specified here and the value set - // by the application via the gRPC client API. If either one is not set, then the other - // will be used. If neither is set, then the built-in default is used. - MaxReqSize *int - // MaxRespSize is the maximum allowed payload size for an individual response in a - // stream (server->client) in bytes. - MaxRespSize *int - // RetryPolicy configures retry options for the method. - retryPolicy *retryPolicy -} +type MethodConfig = internalserviceconfig.MethodConfig type lbConfig struct { name string @@ -127,34 +105,6 @@ type healthCheckConfig struct { ServiceName string } -// retryPolicy defines the go-native version of the retry policy defined by the -// service config here: -// https://github.com/grpc/proposal/blob/master/A6-client-retries.md#integration-with-service-config -type retryPolicy struct { - // MaxAttempts is the maximum number of attempts, including the original RPC. - // - // This field is required and must be two or greater. - maxAttempts int - - // Exponential backoff parameters. 
The initial retry attempt will occur at - // random(0, initialBackoff). In general, the nth attempt will occur at - // random(0, - // min(initialBackoff*backoffMultiplier**(n-1), maxBackoff)). - // - // These fields are required and must be greater than zero. - initialBackoff time.Duration - maxBackoff time.Duration - backoffMultiplier float64 - - // The set of status codes which may be retried. - // - // Status codes are specified as strings, e.g., "UNAVAILABLE". - // - // This field is required and must be non-empty. - // Note: a set is used to store this for easy lookup. - retryableStatusCodes map[codes.Code]bool -} - type jsonRetryPolicy struct { MaxAttempts int InitialBackoff string @@ -313,7 +263,7 @@ func parseServiceConfig(js string) *serviceconfig.ParseResult { WaitForReady: m.WaitForReady, Timeout: d, } - if mc.retryPolicy, err = convertRetryPolicy(m.RetryPolicy); err != nil { + if mc.RetryPolicy, err = convertRetryPolicy(m.RetryPolicy); err != nil { logger.Warningf("grpc: parseServiceConfig error unmarshaling %s due to %v", js, err) return &serviceconfig.ParseResult{Err: err} } @@ -359,7 +309,7 @@ func parseServiceConfig(js string) *serviceconfig.ParseResult { return &serviceconfig.ParseResult{Config: &sc} } -func convertRetryPolicy(jrp *jsonRetryPolicy) (p *retryPolicy, err error) { +func convertRetryPolicy(jrp *jsonRetryPolicy) (p *internalserviceconfig.RetryPolicy, err error) { if jrp == nil { return nil, nil } @@ -381,19 +331,19 @@ func convertRetryPolicy(jrp *jsonRetryPolicy) (p *retryPolicy, err error) { return nil, nil } - rp := &retryPolicy{ - maxAttempts: jrp.MaxAttempts, - initialBackoff: *ib, - maxBackoff: *mb, - backoffMultiplier: jrp.BackoffMultiplier, - retryableStatusCodes: make(map[codes.Code]bool), + rp := &internalserviceconfig.RetryPolicy{ + MaxAttempts: jrp.MaxAttempts, + InitialBackoff: *ib, + MaxBackoff: *mb, + BackoffMultiplier: jrp.BackoffMultiplier, + RetryableStatusCodes: make(map[codes.Code]bool), } - if rp.maxAttempts > 5 { + if rp.MaxAttempts > 5 { // TODO(retry): Make the max maxAttempts configurable. - rp.maxAttempts = 5 + rp.MaxAttempts = 5 } for _, code := range jrp.RetryableStatusCodes { - rp.retryableStatusCodes[code] = true + rp.RetryableStatusCodes[code] = true } return rp, nil } diff --git a/vendor/google.golang.org/grpc/serviceconfig/serviceconfig.go b/vendor/google.golang.org/grpc/serviceconfig/serviceconfig.go index 187c304421c..73a2f926613 100644 --- a/vendor/google.golang.org/grpc/serviceconfig/serviceconfig.go +++ b/vendor/google.golang.org/grpc/serviceconfig/serviceconfig.go @@ -19,7 +19,10 @@ // Package serviceconfig defines types and methods for operating on gRPC // service configs. // -// This package is EXPERIMENTAL. +// Experimental +// +// Notice: This package is EXPERIMENTAL and may be changed or removed in a +// later release. package serviceconfig // Config represents an opaque data structure holding a service config. diff --git a/vendor/google.golang.org/grpc/status/status.go b/vendor/google.golang.org/grpc/status/status.go index 01e182c306c..54d187186b8 100644 --- a/vendor/google.golang.org/grpc/status/status.go +++ b/vendor/google.golang.org/grpc/status/status.go @@ -73,9 +73,11 @@ func FromProto(s *spb.Status) *Status { return status.FromProto(s) } -// FromError returns a Status representing err if it was produced from this -// package or has a method `GRPCStatus() *Status`. Otherwise, ok is false and a -// Status is returned with codes.Unknown and the original error message. 
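A client-side sketch of the service config JSON shape that this parsing accepts, supplied through WithDefaultServiceConfig. The service name, target, and values are made up, and at this version client retries may additionally need to be enabled via the GRPC_GO_RETRY environment variable.

package main

import "google.golang.org/grpc"

func main() {
	// Hypothetical config: retry UNAVAILABLE up to 4 attempts using the
	// exponential backoff parameters handled by convertRetryPolicy above.
	const sc = `{
	  "methodConfig": [{
	    "name": [{"service": "example.Echo"}],
	    "waitForReady": true,
	    "retryPolicy": {
	      "maxAttempts": 4,
	      "initialBackoff": "0.1s",
	      "maxBackoff": "1s",
	      "backoffMultiplier": 2.0,
	      "retryableStatusCodes": ["UNAVAILABLE"]
	    }
	  }]
	}`

	conn, err := grpc.Dial("dns:///echo.example.internal:443",
		grpc.WithInsecure(),
		grpc.WithDefaultServiceConfig(sc),
	)
	if err != nil {
		panic(err)
	}
	defer conn.Close()
}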
+// FromError returns a Status representing err if it was produced by this +// package or has a method `GRPCStatus() *Status`. +// If err is nil, a Status is returned with codes.OK and no message. +// Otherwise, ok is false and a Status is returned with codes.Unknown and +// the original error message. func FromError(err error) (s *Status, ok bool) { if err == nil { return nil, true diff --git a/vendor/google.golang.org/grpc/stream.go b/vendor/google.golang.org/grpc/stream.go index fbc3fb11cb4..eda1248d60c 100644 --- a/vendor/google.golang.org/grpc/stream.go +++ b/vendor/google.golang.org/grpc/stream.go @@ -36,6 +36,8 @@ import ( "google.golang.org/grpc/internal/channelz" "google.golang.org/grpc/internal/grpcrand" "google.golang.org/grpc/internal/grpcutil" + iresolver "google.golang.org/grpc/internal/resolver" + "google.golang.org/grpc/internal/serviceconfig" "google.golang.org/grpc/internal/transport" "google.golang.org/grpc/metadata" "google.golang.org/grpc/peer" @@ -170,7 +172,21 @@ func newClientStream(ctx context.Context, desc *StreamDesc, cc *ClientConn, meth if err := cc.waitForResolvedAddrs(ctx); err != nil { return nil, err } - mc := cc.GetMethodConfig(method) + + var mc serviceconfig.MethodConfig + var onCommit func() + rpcConfig, err := cc.safeConfigSelector.SelectConfig(iresolver.RPCInfo{Context: ctx, Method: method}) + if err != nil { + return nil, status.Convert(err).Err() + } + if rpcConfig != nil { + if rpcConfig.Context != nil { + ctx = rpcConfig.Context + } + mc = rpcConfig.MethodConfig + onCommit = rpcConfig.OnCommitted + } + if mc.WaitForReady != nil { c.failFast = !*mc.WaitForReady } @@ -272,6 +288,7 @@ func newClientStream(ctx context.Context, desc *StreamDesc, cc *ClientConn, meth cancel: cancel, beginTime: beginTime, firstAttempt: true, + onCommit: onCommit, } if !cc.dopts.disableRetry { cs.retryThrottler = cc.retryThrottler.Load().(*retryThrottler) @@ -432,7 +449,8 @@ type clientStream struct { // place where we need to check if the attempt is nil. attempt *csAttempt // TODO(hedging): hedging will have multiple attempts simultaneously. - committed bool // active attempt committed for retry? + committed bool // active attempt committed for retry? 
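A caller-side sketch of the FromError contract documented above, using only public API:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func describe(err error) {
	// nil reports ok=true with an OK status; a status-produced error reports
	// ok=true with its own code; any other error reports ok=false and Unknown.
	st, ok := status.FromError(err)
	fmt.Printf("ok=%v code=%v msg=%q\n", ok, st.Code(), st.Message())
}

func main() {
	describe(nil)                                            // ok=true  code=OK
	describe(status.Error(codes.NotFound, "no such widget")) // ok=true  code=NotFound
	describe(fmt.Errorf("plain failure"))                    // ok=false code=Unknown
}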
+ onCommit func() buffer []func(a *csAttempt) error // operations to replay on retry bufferSize int // current size of buffer } @@ -461,6 +479,9 @@ type csAttempt struct { } func (cs *clientStream) commitAttemptLocked() { + if !cs.committed && cs.onCommit != nil { + cs.onCommit() + } cs.committed = true cs.buffer = nil } @@ -539,8 +560,8 @@ func (cs *clientStream) shouldRetry(err error) error { code = status.Convert(err).Code() } - rp := cs.methodConfig.retryPolicy - if rp == nil || !rp.retryableStatusCodes[code] { + rp := cs.methodConfig.RetryPolicy + if rp == nil || !rp.RetryableStatusCodes[code] { return err } @@ -549,7 +570,7 @@ func (cs *clientStream) shouldRetry(err error) error { if cs.retryThrottler.throttle() { return err } - if cs.numRetries+1 >= rp.maxAttempts { + if cs.numRetries+1 >= rp.MaxAttempts { return err } @@ -558,9 +579,9 @@ func (cs *clientStream) shouldRetry(err error) error { dur = time.Millisecond * time.Duration(pushback) cs.numRetriesSincePushback = 0 } else { - fact := math.Pow(rp.backoffMultiplier, float64(cs.numRetriesSincePushback)) - cur := float64(rp.initialBackoff) * fact - if max := float64(rp.maxBackoff); cur > max { + fact := math.Pow(rp.BackoffMultiplier, float64(cs.numRetriesSincePushback)) + cur := float64(rp.InitialBackoff) * fact + if max := float64(rp.MaxBackoff); cur > max { cur = max } dur = time.Duration(grpcrand.Int63n(int64(cur))) @@ -929,7 +950,7 @@ func (a *csAttempt) recvMsg(m interface{}, payInfo *payloadInfo) (err error) { Payload: m, // TODO truncate large payload. Data: payInfo.uncompressedBytes, - WireLength: payInfo.wireLength, + WireLength: payInfo.wireLength + headerLen, Length: len(payInfo.uncompressedBytes), }) } @@ -1511,7 +1532,7 @@ func (ss *serverStream) RecvMsg(m interface{}) (err error) { Payload: m, // TODO truncate large payload. Data: payInfo.uncompressedBytes, - WireLength: payInfo.wireLength, + WireLength: payInfo.wireLength + headerLen, Length: len(payInfo.uncompressedBytes), }) } diff --git a/vendor/google.golang.org/grpc/tap/tap.go b/vendor/google.golang.org/grpc/tap/tap.go index 584360f681b..caea1ebed6e 100644 --- a/vendor/google.golang.org/grpc/tap/tap.go +++ b/vendor/google.golang.org/grpc/tap/tap.go @@ -17,7 +17,12 @@ */ // Package tap defines the function handles which are executed on the transport -// layer of gRPC-Go and related information. Everything here is EXPERIMENTAL. +// layer of gRPC-Go and related information. +// +// Experimental +// +// Notice: This API is EXPERIMENTAL and may be changed or removed in a +// later release. package tap import ( diff --git a/vendor/google.golang.org/grpc/test/grpc_testing/test.pb.go b/vendor/google.golang.org/grpc/test/grpc_testing/test.pb.go index 70e3b89228b..89ebb3420f2 100644 --- a/vendor/google.golang.org/grpc/test/grpc_testing/test.pb.go +++ b/vendor/google.golang.org/grpc/test/grpc_testing/test.pb.go @@ -1,24 +1,46 @@ +// Copyright 2017 gRPC authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// An integration test service that covers all the method signature permutations +// of unary/streaming requests/responses. + // Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.25.0 +// protoc v3.14.0 // source: test/grpc_testing/test.proto package grpc_testing import ( - fmt "fmt" proto "github.com/golang/protobuf/proto" - math "math" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" ) -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package +// This is a compile-time assertion that a sufficiently up-to-date version +// of the legacy proto package is being used. +const _ = proto.ProtoPackageIsVersion4 // The type of payload that should be returned. type PayloadType int32 @@ -32,109 +54,149 @@ const ( PayloadType_RANDOM PayloadType = 2 ) -var PayloadType_name = map[int32]string{ - 0: "COMPRESSABLE", - 1: "UNCOMPRESSABLE", - 2: "RANDOM", -} +// Enum value maps for PayloadType. +var ( + PayloadType_name = map[int32]string{ + 0: "COMPRESSABLE", + 1: "UNCOMPRESSABLE", + 2: "RANDOM", + } + PayloadType_value = map[string]int32{ + "COMPRESSABLE": 0, + "UNCOMPRESSABLE": 1, + "RANDOM": 2, + } +) -var PayloadType_value = map[string]int32{ - "COMPRESSABLE": 0, - "UNCOMPRESSABLE": 1, - "RANDOM": 2, +func (x PayloadType) Enum() *PayloadType { + p := new(PayloadType) + *p = x + return p } func (x PayloadType) String() string { - return proto.EnumName(PayloadType_name, int32(x)) + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } -func (PayloadType) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{0} +func (PayloadType) Descriptor() protoreflect.EnumDescriptor { + return file_test_grpc_testing_test_proto_enumTypes[0].Descriptor() } -type Empty struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` +func (PayloadType) Type() protoreflect.EnumType { + return &file_test_grpc_testing_test_proto_enumTypes[0] } -func (m *Empty) Reset() { *m = Empty{} } -func (m *Empty) String() string { return proto.CompactTextString(m) } -func (*Empty) ProtoMessage() {} -func (*Empty) Descriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{0} +func (x PayloadType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) } -func (m *Empty) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Empty.Unmarshal(m, b) +// Deprecated: Use PayloadType.Descriptor instead. 
+func (PayloadType) EnumDescriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{0} } -func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Empty.Marshal(b, m, deterministic) + +type Empty struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields } -func (m *Empty) XXX_Merge(src proto.Message) { - xxx_messageInfo_Empty.Merge(m, src) + +func (x *Empty) Reset() { + *x = Empty{} + if protoimpl.UnsafeEnabled { + mi := &file_test_grpc_testing_test_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Empty) XXX_Size() int { - return xxx_messageInfo_Empty.Size(m) + +func (x *Empty) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Empty) XXX_DiscardUnknown() { - xxx_messageInfo_Empty.DiscardUnknown(m) + +func (*Empty) ProtoMessage() {} + +func (x *Empty) ProtoReflect() protoreflect.Message { + mi := &file_test_grpc_testing_test_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Empty proto.InternalMessageInfo +// Deprecated: Use Empty.ProtoReflect.Descriptor instead. +func (*Empty) Descriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{0} +} // A block of data, to simply increase gRPC message size. type Payload struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // The type of data in body. Type PayloadType `protobuf:"varint,1,opt,name=type,proto3,enum=grpc.testing.PayloadType" json:"type,omitempty"` // Primary contents of payload. 
- Body []byte `protobuf:"bytes,2,opt,name=body,proto3" json:"body,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Body []byte `protobuf:"bytes,2,opt,name=body,proto3" json:"body,omitempty"` } -func (m *Payload) Reset() { *m = Payload{} } -func (m *Payload) String() string { return proto.CompactTextString(m) } -func (*Payload) ProtoMessage() {} -func (*Payload) Descriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{1} +func (x *Payload) Reset() { + *x = Payload{} + if protoimpl.UnsafeEnabled { + mi := &file_test_grpc_testing_test_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Payload) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Payload.Unmarshal(m, b) -} -func (m *Payload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Payload.Marshal(b, m, deterministic) -} -func (m *Payload) XXX_Merge(src proto.Message) { - xxx_messageInfo_Payload.Merge(m, src) +func (x *Payload) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Payload) XXX_Size() int { - return xxx_messageInfo_Payload.Size(m) -} -func (m *Payload) XXX_DiscardUnknown() { - xxx_messageInfo_Payload.DiscardUnknown(m) + +func (*Payload) ProtoMessage() {} + +func (x *Payload) ProtoReflect() protoreflect.Message { + mi := &file_test_grpc_testing_test_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Payload proto.InternalMessageInfo +// Deprecated: Use Payload.ProtoReflect.Descriptor instead. +func (*Payload) Descriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{1} +} -func (m *Payload) GetType() PayloadType { - if m != nil { - return m.Type +func (x *Payload) GetType() PayloadType { + if x != nil { + return x.Type } return PayloadType_COMPRESSABLE } -func (m *Payload) GetBody() []byte { - if m != nil { - return m.Body +func (x *Payload) GetBody() []byte { + if x != nil { + return x.Body } return nil } // Unary request. type SimpleRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // Desired payload type in the response from the server. // If response_type is RANDOM, server randomly chooses one from other formats. ResponseType PayloadType `protobuf:"varint,1,opt,name=response_type,json=responseType,proto3,enum=grpc.testing.PayloadType" json:"response_type,omitempty"` @@ -146,268 +208,308 @@ type SimpleRequest struct { // Whether SimpleResponse should include username. FillUsername bool `protobuf:"varint,4,opt,name=fill_username,json=fillUsername,proto3" json:"fill_username,omitempty"` // Whether SimpleResponse should include OAuth scope. 
- FillOauthScope bool `protobuf:"varint,5,opt,name=fill_oauth_scope,json=fillOauthScope,proto3" json:"fill_oauth_scope,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + FillOauthScope bool `protobuf:"varint,5,opt,name=fill_oauth_scope,json=fillOauthScope,proto3" json:"fill_oauth_scope,omitempty"` } -func (m *SimpleRequest) Reset() { *m = SimpleRequest{} } -func (m *SimpleRequest) String() string { return proto.CompactTextString(m) } -func (*SimpleRequest) ProtoMessage() {} -func (*SimpleRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{2} +func (x *SimpleRequest) Reset() { + *x = SimpleRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_test_grpc_testing_test_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *SimpleRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SimpleRequest.Unmarshal(m, b) -} -func (m *SimpleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SimpleRequest.Marshal(b, m, deterministic) -} -func (m *SimpleRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_SimpleRequest.Merge(m, src) +func (x *SimpleRequest) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *SimpleRequest) XXX_Size() int { - return xxx_messageInfo_SimpleRequest.Size(m) -} -func (m *SimpleRequest) XXX_DiscardUnknown() { - xxx_messageInfo_SimpleRequest.DiscardUnknown(m) + +func (*SimpleRequest) ProtoMessage() {} + +func (x *SimpleRequest) ProtoReflect() protoreflect.Message { + mi := &file_test_grpc_testing_test_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_SimpleRequest proto.InternalMessageInfo +// Deprecated: Use SimpleRequest.ProtoReflect.Descriptor instead. +func (*SimpleRequest) Descriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{2} +} -func (m *SimpleRequest) GetResponseType() PayloadType { - if m != nil { - return m.ResponseType +func (x *SimpleRequest) GetResponseType() PayloadType { + if x != nil { + return x.ResponseType } return PayloadType_COMPRESSABLE } -func (m *SimpleRequest) GetResponseSize() int32 { - if m != nil { - return m.ResponseSize +func (x *SimpleRequest) GetResponseSize() int32 { + if x != nil { + return x.ResponseSize } return 0 } -func (m *SimpleRequest) GetPayload() *Payload { - if m != nil { - return m.Payload +func (x *SimpleRequest) GetPayload() *Payload { + if x != nil { + return x.Payload } return nil } -func (m *SimpleRequest) GetFillUsername() bool { - if m != nil { - return m.FillUsername +func (x *SimpleRequest) GetFillUsername() bool { + if x != nil { + return x.FillUsername } return false } -func (m *SimpleRequest) GetFillOauthScope() bool { - if m != nil { - return m.FillOauthScope +func (x *SimpleRequest) GetFillOauthScope() bool { + if x != nil { + return x.FillOauthScope } return false } // Unary response, as configured by the request. type SimpleResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // Payload to increase message size. 
Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` // The user the request came from, for verifying authentication was // successful when the client expected it. Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username,omitempty"` // OAuth scope. - OauthScope string `protobuf:"bytes,3,opt,name=oauth_scope,json=oauthScope,proto3" json:"oauth_scope,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + OauthScope string `protobuf:"bytes,3,opt,name=oauth_scope,json=oauthScope,proto3" json:"oauth_scope,omitempty"` } -func (m *SimpleResponse) Reset() { *m = SimpleResponse{} } -func (m *SimpleResponse) String() string { return proto.CompactTextString(m) } -func (*SimpleResponse) ProtoMessage() {} -func (*SimpleResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{3} +func (x *SimpleResponse) Reset() { + *x = SimpleResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_test_grpc_testing_test_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *SimpleResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SimpleResponse.Unmarshal(m, b) -} -func (m *SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SimpleResponse.Marshal(b, m, deterministic) -} -func (m *SimpleResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_SimpleResponse.Merge(m, src) +func (x *SimpleResponse) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *SimpleResponse) XXX_Size() int { - return xxx_messageInfo_SimpleResponse.Size(m) -} -func (m *SimpleResponse) XXX_DiscardUnknown() { - xxx_messageInfo_SimpleResponse.DiscardUnknown(m) + +func (*SimpleResponse) ProtoMessage() {} + +func (x *SimpleResponse) ProtoReflect() protoreflect.Message { + mi := &file_test_grpc_testing_test_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_SimpleResponse proto.InternalMessageInfo +// Deprecated: Use SimpleResponse.ProtoReflect.Descriptor instead. +func (*SimpleResponse) Descriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{3} +} -func (m *SimpleResponse) GetPayload() *Payload { - if m != nil { - return m.Payload +func (x *SimpleResponse) GetPayload() *Payload { + if x != nil { + return x.Payload } return nil } -func (m *SimpleResponse) GetUsername() string { - if m != nil { - return m.Username +func (x *SimpleResponse) GetUsername() string { + if x != nil { + return x.Username } return "" } -func (m *SimpleResponse) GetOauthScope() string { - if m != nil { - return m.OauthScope +func (x *SimpleResponse) GetOauthScope() string { + if x != nil { + return x.OauthScope } return "" } // Client-streaming request. type StreamingInputCallRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // Optional input payload sent along with the request. 
- Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` } -func (m *StreamingInputCallRequest) Reset() { *m = StreamingInputCallRequest{} } -func (m *StreamingInputCallRequest) String() string { return proto.CompactTextString(m) } -func (*StreamingInputCallRequest) ProtoMessage() {} -func (*StreamingInputCallRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{4} +func (x *StreamingInputCallRequest) Reset() { + *x = StreamingInputCallRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_test_grpc_testing_test_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *StreamingInputCallRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_StreamingInputCallRequest.Unmarshal(m, b) +func (x *StreamingInputCallRequest) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *StreamingInputCallRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_StreamingInputCallRequest.Marshal(b, m, deterministic) -} -func (m *StreamingInputCallRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_StreamingInputCallRequest.Merge(m, src) -} -func (m *StreamingInputCallRequest) XXX_Size() int { - return xxx_messageInfo_StreamingInputCallRequest.Size(m) -} -func (m *StreamingInputCallRequest) XXX_DiscardUnknown() { - xxx_messageInfo_StreamingInputCallRequest.DiscardUnknown(m) + +func (*StreamingInputCallRequest) ProtoMessage() {} + +func (x *StreamingInputCallRequest) ProtoReflect() protoreflect.Message { + mi := &file_test_grpc_testing_test_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_StreamingInputCallRequest proto.InternalMessageInfo +// Deprecated: Use StreamingInputCallRequest.ProtoReflect.Descriptor instead. +func (*StreamingInputCallRequest) Descriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{4} +} -func (m *StreamingInputCallRequest) GetPayload() *Payload { - if m != nil { - return m.Payload +func (x *StreamingInputCallRequest) GetPayload() *Payload { + if x != nil { + return x.Payload } return nil } // Client-streaming response. type StreamingInputCallResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // Aggregated size of payloads received from the client. 
- AggregatedPayloadSize int32 `protobuf:"varint,1,opt,name=aggregated_payload_size,json=aggregatedPayloadSize,proto3" json:"aggregated_payload_size,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + AggregatedPayloadSize int32 `protobuf:"varint,1,opt,name=aggregated_payload_size,json=aggregatedPayloadSize,proto3" json:"aggregated_payload_size,omitempty"` } -func (m *StreamingInputCallResponse) Reset() { *m = StreamingInputCallResponse{} } -func (m *StreamingInputCallResponse) String() string { return proto.CompactTextString(m) } -func (*StreamingInputCallResponse) ProtoMessage() {} -func (*StreamingInputCallResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{5} +func (x *StreamingInputCallResponse) Reset() { + *x = StreamingInputCallResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_test_grpc_testing_test_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *StreamingInputCallResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_StreamingInputCallResponse.Unmarshal(m, b) -} -func (m *StreamingInputCallResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_StreamingInputCallResponse.Marshal(b, m, deterministic) +func (x *StreamingInputCallResponse) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *StreamingInputCallResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_StreamingInputCallResponse.Merge(m, src) -} -func (m *StreamingInputCallResponse) XXX_Size() int { - return xxx_messageInfo_StreamingInputCallResponse.Size(m) -} -func (m *StreamingInputCallResponse) XXX_DiscardUnknown() { - xxx_messageInfo_StreamingInputCallResponse.DiscardUnknown(m) + +func (*StreamingInputCallResponse) ProtoMessage() {} + +func (x *StreamingInputCallResponse) ProtoReflect() protoreflect.Message { + mi := &file_test_grpc_testing_test_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_StreamingInputCallResponse proto.InternalMessageInfo +// Deprecated: Use StreamingInputCallResponse.ProtoReflect.Descriptor instead. +func (*StreamingInputCallResponse) Descriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{5} +} -func (m *StreamingInputCallResponse) GetAggregatedPayloadSize() int32 { - if m != nil { - return m.AggregatedPayloadSize +func (x *StreamingInputCallResponse) GetAggregatedPayloadSize() int32 { + if x != nil { + return x.AggregatedPayloadSize } return 0 } // Configuration for a particular response. type ResponseParameters struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // Desired payload sizes in responses from the server. // If response_type is COMPRESSABLE, this denotes the size before compression. Size int32 `protobuf:"varint,1,opt,name=size,proto3" json:"size,omitempty"` // Desired interval between consecutive responses in the response stream in // microseconds. 
- IntervalUs int32 `protobuf:"varint,2,opt,name=interval_us,json=intervalUs,proto3" json:"interval_us,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + IntervalUs int32 `protobuf:"varint,2,opt,name=interval_us,json=intervalUs,proto3" json:"interval_us,omitempty"` } -func (m *ResponseParameters) Reset() { *m = ResponseParameters{} } -func (m *ResponseParameters) String() string { return proto.CompactTextString(m) } -func (*ResponseParameters) ProtoMessage() {} -func (*ResponseParameters) Descriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{6} +func (x *ResponseParameters) Reset() { + *x = ResponseParameters{} + if protoimpl.UnsafeEnabled { + mi := &file_test_grpc_testing_test_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *ResponseParameters) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ResponseParameters.Unmarshal(m, b) +func (x *ResponseParameters) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *ResponseParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ResponseParameters.Marshal(b, m, deterministic) -} -func (m *ResponseParameters) XXX_Merge(src proto.Message) { - xxx_messageInfo_ResponseParameters.Merge(m, src) -} -func (m *ResponseParameters) XXX_Size() int { - return xxx_messageInfo_ResponseParameters.Size(m) -} -func (m *ResponseParameters) XXX_DiscardUnknown() { - xxx_messageInfo_ResponseParameters.DiscardUnknown(m) + +func (*ResponseParameters) ProtoMessage() {} + +func (x *ResponseParameters) ProtoReflect() protoreflect.Message { + mi := &file_test_grpc_testing_test_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_ResponseParameters proto.InternalMessageInfo +// Deprecated: Use ResponseParameters.ProtoReflect.Descriptor instead. +func (*ResponseParameters) Descriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{6} +} -func (m *ResponseParameters) GetSize() int32 { - if m != nil { - return m.Size +func (x *ResponseParameters) GetSize() int32 { + if x != nil { + return x.Size } return 0 } -func (m *ResponseParameters) GetIntervalUs() int32 { - if m != nil { - return m.IntervalUs +func (x *ResponseParameters) GetIntervalUs() int32 { + if x != nil { + return x.IntervalUs } return 0 } // Server-streaming request. type StreamingOutputCallRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // Desired payload type in the response from the server. // If response_type is RANDOM, the payload from each response in the stream // might be of different types. This is to simulate a mixed type of payload @@ -416,153 +518,412 @@ type StreamingOutputCallRequest struct { // Configuration for each expected response message. ResponseParameters []*ResponseParameters `protobuf:"bytes,2,rep,name=response_parameters,json=responseParameters,proto3" json:"response_parameters,omitempty"` // Optional input payload sent along with the request. 
- Payload *Payload `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Payload *Payload `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` } -func (m *StreamingOutputCallRequest) Reset() { *m = StreamingOutputCallRequest{} } -func (m *StreamingOutputCallRequest) String() string { return proto.CompactTextString(m) } -func (*StreamingOutputCallRequest) ProtoMessage() {} -func (*StreamingOutputCallRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{7} +func (x *StreamingOutputCallRequest) Reset() { + *x = StreamingOutputCallRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_test_grpc_testing_test_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *StreamingOutputCallRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_StreamingOutputCallRequest.Unmarshal(m, b) -} -func (m *StreamingOutputCallRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_StreamingOutputCallRequest.Marshal(b, m, deterministic) -} -func (m *StreamingOutputCallRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_StreamingOutputCallRequest.Merge(m, src) -} -func (m *StreamingOutputCallRequest) XXX_Size() int { - return xxx_messageInfo_StreamingOutputCallRequest.Size(m) +func (x *StreamingOutputCallRequest) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *StreamingOutputCallRequest) XXX_DiscardUnknown() { - xxx_messageInfo_StreamingOutputCallRequest.DiscardUnknown(m) + +func (*StreamingOutputCallRequest) ProtoMessage() {} + +func (x *StreamingOutputCallRequest) ProtoReflect() protoreflect.Message { + mi := &file_test_grpc_testing_test_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_StreamingOutputCallRequest proto.InternalMessageInfo +// Deprecated: Use StreamingOutputCallRequest.ProtoReflect.Descriptor instead. +func (*StreamingOutputCallRequest) Descriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{7} +} -func (m *StreamingOutputCallRequest) GetResponseType() PayloadType { - if m != nil { - return m.ResponseType +func (x *StreamingOutputCallRequest) GetResponseType() PayloadType { + if x != nil { + return x.ResponseType } return PayloadType_COMPRESSABLE } -func (m *StreamingOutputCallRequest) GetResponseParameters() []*ResponseParameters { - if m != nil { - return m.ResponseParameters +func (x *StreamingOutputCallRequest) GetResponseParameters() []*ResponseParameters { + if x != nil { + return x.ResponseParameters } return nil } -func (m *StreamingOutputCallRequest) GetPayload() *Payload { - if m != nil { - return m.Payload +func (x *StreamingOutputCallRequest) GetPayload() *Payload { + if x != nil { + return x.Payload } return nil } // Server-streaming response, as configured by the request and parameters. type StreamingOutputCallResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + // Payload to increase response size. 
- Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` } -func (m *StreamingOutputCallResponse) Reset() { *m = StreamingOutputCallResponse{} } -func (m *StreamingOutputCallResponse) String() string { return proto.CompactTextString(m) } -func (*StreamingOutputCallResponse) ProtoMessage() {} -func (*StreamingOutputCallResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_139516ae706ad4b7, []int{8} +func (x *StreamingOutputCallResponse) Reset() { + *x = StreamingOutputCallResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_test_grpc_testing_test_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *StreamingOutputCallResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_StreamingOutputCallResponse.Unmarshal(m, b) -} -func (m *StreamingOutputCallResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_StreamingOutputCallResponse.Marshal(b, m, deterministic) -} -func (m *StreamingOutputCallResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_StreamingOutputCallResponse.Merge(m, src) +func (x *StreamingOutputCallResponse) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *StreamingOutputCallResponse) XXX_Size() int { - return xxx_messageInfo_StreamingOutputCallResponse.Size(m) -} -func (m *StreamingOutputCallResponse) XXX_DiscardUnknown() { - xxx_messageInfo_StreamingOutputCallResponse.DiscardUnknown(m) + +func (*StreamingOutputCallResponse) ProtoMessage() {} + +func (x *StreamingOutputCallResponse) ProtoReflect() protoreflect.Message { + mi := &file_test_grpc_testing_test_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_StreamingOutputCallResponse proto.InternalMessageInfo +// Deprecated: Use StreamingOutputCallResponse.ProtoReflect.Descriptor instead. 
+func (*StreamingOutputCallResponse) Descriptor() ([]byte, []int) { + return file_test_grpc_testing_test_proto_rawDescGZIP(), []int{8} +} -func (m *StreamingOutputCallResponse) GetPayload() *Payload { - if m != nil { - return m.Payload +func (x *StreamingOutputCallResponse) GetPayload() *Payload { + if x != nil { + return x.Payload } return nil } -func init() { - proto.RegisterEnum("grpc.testing.PayloadType", PayloadType_name, PayloadType_value) - proto.RegisterType((*Empty)(nil), "grpc.testing.Empty") - proto.RegisterType((*Payload)(nil), "grpc.testing.Payload") - proto.RegisterType((*SimpleRequest)(nil), "grpc.testing.SimpleRequest") - proto.RegisterType((*SimpleResponse)(nil), "grpc.testing.SimpleResponse") - proto.RegisterType((*StreamingInputCallRequest)(nil), "grpc.testing.StreamingInputCallRequest") - proto.RegisterType((*StreamingInputCallResponse)(nil), "grpc.testing.StreamingInputCallResponse") - proto.RegisterType((*ResponseParameters)(nil), "grpc.testing.ResponseParameters") - proto.RegisterType((*StreamingOutputCallRequest)(nil), "grpc.testing.StreamingOutputCallRequest") - proto.RegisterType((*StreamingOutputCallResponse)(nil), "grpc.testing.StreamingOutputCallResponse") -} - -func init() { proto.RegisterFile("test/grpc_testing/test.proto", fileDescriptor_139516ae706ad4b7) } - -var fileDescriptor_139516ae706ad4b7 = []byte{ - // 615 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x55, 0xdd, 0x6e, 0xd3, 0x4c, - 0x10, 0xfd, 0xb6, 0x4d, 0xff, 0x26, 0x69, 0x14, 0x6d, 0x55, 0x7d, 0x69, 0x5a, 0x89, 0xc8, 0x5c, - 0x60, 0x2a, 0x91, 0xa0, 0x20, 0xb8, 0x04, 0xf5, 0x57, 0x54, 0x6a, 0x9b, 0x62, 0x37, 0x37, 0xdc, - 0x44, 0xdb, 0x76, 0x6a, 0x2c, 0x6d, 0xbc, 0xcb, 0x7a, 0x5d, 0x91, 0x5e, 0xf0, 0x62, 0xbc, 0x0c, - 0x0f, 0xc1, 0x03, 0xa0, 0xdd, 0xd8, 0xa9, 0xd3, 0xb8, 0x22, 0x05, 0xc1, 0x55, 0x36, 0x33, 0x67, - 0xce, 0x9c, 0xe3, 0x19, 0x7b, 0x61, 0x4b, 0x63, 0xac, 0xdb, 0x81, 0x92, 0x97, 0x7d, 0x73, 0x0a, - 0xa3, 0xa0, 0x6d, 0x7e, 0x5b, 0x52, 0x09, 0x2d, 0x68, 0xc5, 0x24, 0x5a, 0x69, 0xc2, 0x59, 0x82, - 0x85, 0x83, 0x81, 0xd4, 0x43, 0xe7, 0x18, 0x96, 0xce, 0xd8, 0x90, 0x0b, 0x76, 0x45, 0x5f, 0x40, - 0x49, 0x0f, 0x25, 0xd6, 0x49, 0x93, 0xb8, 0xd5, 0xce, 0x46, 0x2b, 0x5f, 0xd0, 0x4a, 0x41, 0xe7, - 0x43, 0x89, 0x9e, 0x85, 0x51, 0x0a, 0xa5, 0x0b, 0x71, 0x35, 0xac, 0xcf, 0x35, 0x89, 0x5b, 0xf1, - 0xec, 0xd9, 0xf9, 0x41, 0x60, 0xd5, 0x0f, 0x07, 0x92, 0xa3, 0x87, 0x9f, 0x13, 0x8c, 0x35, 0x7d, - 0x0b, 0xab, 0x0a, 0x63, 0x29, 0xa2, 0x18, 0xfb, 0xb3, 0xb1, 0x57, 0x32, 0xbc, 0xf9, 0x47, 0x9f, - 0xe6, 0xea, 0xe3, 0xf0, 0x16, 0x6d, 0xbb, 0x85, 0x3b, 0x90, 0x1f, 0xde, 0x22, 0x6d, 0xc3, 0x92, - 0x1c, 0x31, 0xd4, 0xe7, 0x9b, 0xc4, 0x2d, 0x77, 0xd6, 0x0b, 0xe9, 0xbd, 0x0c, 0x65, 0x58, 0xaf, - 0x43, 0xce, 0xfb, 0x49, 0x8c, 0x2a, 0x62, 0x03, 0xac, 0x97, 0x9a, 0xc4, 0x5d, 0xf6, 0x2a, 0x26, - 0xd8, 0x4b, 0x63, 0xd4, 0x85, 0x9a, 0x05, 0x09, 0x96, 0xe8, 0x4f, 0xfd, 0xf8, 0x52, 0x48, 0xac, - 0x2f, 0x58, 0x5c, 0xd5, 0xc4, 0xbb, 0x26, 0xec, 0x9b, 0xa8, 0xf3, 0x15, 0xaa, 0x99, 0xeb, 0x91, - 0xaa, 0xbc, 0x22, 0x32, 0x93, 0xa2, 0x06, 0x2c, 0x8f, 0xc5, 0x18, 0x8b, 0x2b, 0xde, 0xf8, 0x3f, - 0x7d, 0x02, 0xe5, 0xbc, 0x86, 0x79, 0x9b, 0x06, 0x71, 0xd7, 0xff, 0x18, 0x36, 0x7c, 0xad, 0x90, - 0x0d, 0xc2, 0x28, 0x38, 0x8a, 0x64, 0xa2, 0xf7, 0x18, 0xe7, 0xd9, 0x04, 0x1e, 0x2b, 0xc5, 0x39, - 0x87, 0x46, 0x11, 0x5b, 0xea, 0xec, 0x0d, 0xfc, 0xcf, 0x82, 0x40, 0x61, 0xc0, 0x34, 0x5e, 0xf5, - 0xd3, 0x9a, 0xd1, 0x68, 0x88, 0x1d, 0xcd, 0xfa, 0x5d, 0x3a, 0xa5, 0x36, 0x33, 0x72, 
0x8e, 0x80, - 0x66, 0x1c, 0x67, 0x4c, 0xb1, 0x01, 0x6a, 0x54, 0xb1, 0x59, 0xa2, 0x5c, 0xa9, 0x3d, 0x1b, 0xbb, - 0x61, 0xa4, 0x51, 0xdd, 0x30, 0x33, 0xa0, 0x74, 0xe0, 0x90, 0x85, 0x7a, 0xb1, 0xf3, 0x9d, 0xe4, - 0x14, 0x76, 0x13, 0x7d, 0xcf, 0xf0, 0x9f, 0xae, 0xdc, 0x07, 0x58, 0x1b, 0xd7, 0xcb, 0xb1, 0xd4, - 0xfa, 0x5c, 0x73, 0xde, 0x2d, 0x77, 0x9a, 0x93, 0x2c, 0xd3, 0x96, 0x3c, 0xaa, 0xa6, 0x6d, 0x3e, - 0x76, 0x41, 0x9d, 0x53, 0xd8, 0x2c, 0x74, 0xf8, 0x9b, 0xeb, 0xb5, 0xfd, 0x0e, 0xca, 0x39, 0xc3, - 0xb4, 0x06, 0x95, 0xbd, 0xee, 0xc9, 0x99, 0x77, 0xe0, 0xfb, 0x3b, 0xbb, 0xc7, 0x07, 0xb5, 0xff, - 0x28, 0x85, 0x6a, 0xef, 0x74, 0x22, 0x46, 0x28, 0xc0, 0xa2, 0xb7, 0x73, 0xba, 0xdf, 0x3d, 0xa9, - 0xcd, 0x75, 0xbe, 0x95, 0xa0, 0x7c, 0x8e, 0xb1, 0xf6, 0x51, 0xdd, 0x84, 0x97, 0x48, 0x5f, 0xc3, - 0x8a, 0xfd, 0x80, 0x18, 0x59, 0x74, 0x6d, 0xb2, 0xbb, 0x4d, 0x34, 0x8a, 0x82, 0xf4, 0x10, 0x56, - 0x7a, 0x11, 0x53, 0xa3, 0xb2, 0xcd, 0x49, 0xc4, 0xc4, 0x87, 0xa3, 0xb1, 0x55, 0x9c, 0x4c, 0x1f, - 0x00, 0x87, 0xb5, 0x82, 0xe7, 0x43, 0xdd, 0x7b, 0x45, 0x0f, 0x2e, 0x49, 0xe3, 0xf9, 0x0c, 0xc8, - 0x51, 0xaf, 0x97, 0x84, 0x86, 0x40, 0xa7, 0xdf, 0x08, 0xfa, 0xec, 0x01, 0x8a, 0xfb, 0x6f, 0x60, - 0xc3, 0xfd, 0x35, 0x70, 0xd4, 0xca, 0x35, 0xad, 0xaa, 0x87, 0x09, 0xe7, 0xfb, 0x89, 0xe4, 0xf8, - 0xe5, 0xaf, 0x79, 0x72, 0x89, 0x75, 0x55, 0x7d, 0xcf, 0xf8, 0xf5, 0x3f, 0x68, 0xb5, 0xbb, 0xfd, - 0xd1, 0x0d, 0x84, 0x08, 0x38, 0xb6, 0x02, 0xc1, 0x59, 0x14, 0xb4, 0x84, 0x0a, 0xec, 0x4d, 0xd5, - 0x9e, 0xba, 0xb3, 0x2e, 0x16, 0xed, 0x7d, 0xf5, 0xea, 0x67, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7e, - 0x50, 0x51, 0x5b, 0xcf, 0x06, 0x00, 0x00, +var File_test_grpc_testing_test_proto protoreflect.FileDescriptor + +var file_test_grpc_testing_test_proto_rawDesc = []byte{ + 0x0a, 0x1c, 0x74, 0x65, 0x73, 0x74, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x5f, 0x74, 0x65, 0x73, 0x74, + 0x69, 0x6e, 0x67, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, + 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x22, 0x07, 0x0a, 0x05, + 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x4c, 0x0a, 0x07, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, + 0x12, 0x2d, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x19, + 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x50, 0x61, + 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, + 0x6f, 0x64, 0x79, 0x22, 0xf4, 0x01, 0x0a, 0x0d, 0x53, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3e, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x67, + 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x50, 0x61, 0x79, 0x6c, + 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0c, 0x72, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x2f, 0x0a, 0x07, 0x70, 0x61, + 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x67, 0x72, + 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x50, 0x61, 0x79, 0x6c, 0x6f, + 0x61, 0x64, 0x52, 0x07, 0x70, 0x61, 
0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x66, + 0x69, 0x6c, 0x6c, 0x5f, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x0c, 0x66, 0x69, 0x6c, 0x6c, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x28, 0x0a, 0x10, 0x66, 0x69, 0x6c, 0x6c, 0x5f, 0x6f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x73, + 0x63, 0x6f, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x66, 0x69, 0x6c, 0x6c, + 0x4f, 0x61, 0x75, 0x74, 0x68, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x22, 0x7e, 0x0a, 0x0e, 0x53, 0x69, + 0x6d, 0x70, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x07, + 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, + 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x50, 0x61, 0x79, + 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x1a, 0x0a, + 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x61, 0x75, + 0x74, 0x68, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, + 0x6f, 0x61, 0x75, 0x74, 0x68, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x22, 0x4c, 0x0a, 0x19, 0x53, 0x74, + 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x43, 0x61, 0x6c, 0x6c, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, + 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, + 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x52, + 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x54, 0x0a, 0x1a, 0x53, 0x74, 0x72, 0x65, + 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x36, 0x0a, 0x17, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, + 0x61, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x73, 0x69, 0x7a, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x15, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, + 0x74, 0x65, 0x64, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x53, 0x69, 0x7a, 0x65, 0x22, 0x49, + 0x0a, 0x12, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x76, 0x61, 0x6c, 0x5f, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x55, 0x73, 0x22, 0xe0, 0x01, 0x0a, 0x1a, 0x53, 0x74, + 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x43, 0x61, 0x6c, + 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3e, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x19, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x50, + 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x51, 0x0a, 0x13, 0x72, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x67, 0x72, 
0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, + 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x52, 0x12, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x2f, 0x0a, 0x07, 0x70, + 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x67, + 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x50, 0x61, 0x79, 0x6c, + 0x6f, 0x61, 0x64, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x4e, 0x0a, 0x1b, + 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x43, + 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x07, 0x70, + 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x67, + 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x50, 0x61, 0x79, 0x6c, + 0x6f, 0x61, 0x64, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x2a, 0x3f, 0x0a, 0x0b, + 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, 0x0c, 0x43, + 0x4f, 0x4d, 0x50, 0x52, 0x45, 0x53, 0x53, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x00, 0x12, 0x12, 0x0a, + 0x0e, 0x55, 0x4e, 0x43, 0x4f, 0x4d, 0x50, 0x52, 0x45, 0x53, 0x53, 0x41, 0x42, 0x4c, 0x45, 0x10, + 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x52, 0x41, 0x4e, 0x44, 0x4f, 0x4d, 0x10, 0x02, 0x32, 0xbb, 0x04, + 0x0a, 0x0b, 0x54, 0x65, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x35, 0x0a, + 0x09, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x43, 0x61, 0x6c, 0x6c, 0x12, 0x13, 0x2e, 0x67, 0x72, 0x70, + 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, + 0x13, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x45, + 0x6d, 0x70, 0x74, 0x79, 0x12, 0x46, 0x0a, 0x09, 0x55, 0x6e, 0x61, 0x72, 0x79, 0x43, 0x61, 0x6c, + 0x6c, 0x12, 0x1b, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, + 0x2e, 0x53, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, + 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x69, + 0x6d, 0x70, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6c, 0x0a, 0x13, + 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x43, + 0x61, 0x6c, 0x6c, 0x12, 0x28, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, + 0x6e, 0x67, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x4f, 0x75, 0x74, 0x70, + 0x75, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, + 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x74, 0x72, + 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x43, 0x61, 0x6c, 0x6c, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x69, 0x0a, 0x12, 0x53, 0x74, + 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x43, 0x61, 0x6c, 0x6c, + 0x12, 0x27, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, + 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x43, 0x61, + 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x67, 0x72, 0x70, 0x63, + 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 
0x6d, 0x69, + 0x6e, 0x67, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x28, 0x01, 0x12, 0x69, 0x0a, 0x0e, 0x46, 0x75, 0x6c, 0x6c, 0x44, 0x75, 0x70, + 0x6c, 0x65, 0x78, 0x43, 0x61, 0x6c, 0x6c, 0x12, 0x28, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, + 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x29, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, + 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, + 0x12, 0x69, 0x0a, 0x0e, 0x48, 0x61, 0x6c, 0x66, 0x44, 0x75, 0x70, 0x6c, 0x65, 0x78, 0x43, 0x61, + 0x6c, 0x6c, 0x12, 0x28, 0x2e, 0x67, 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, + 0x67, 0x2e, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, + 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x67, + 0x72, 0x70, 0x63, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x53, 0x74, 0x72, 0x65, + 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x43, 0x61, 0x6c, 0x6c, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, 0x2a, 0x5a, 0x28, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, + 0x2f, 0x67, 0x72, 0x70, 0x63, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x5f, + 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_test_grpc_testing_test_proto_rawDescOnce sync.Once + file_test_grpc_testing_test_proto_rawDescData = file_test_grpc_testing_test_proto_rawDesc +) + +func file_test_grpc_testing_test_proto_rawDescGZIP() []byte { + file_test_grpc_testing_test_proto_rawDescOnce.Do(func() { + file_test_grpc_testing_test_proto_rawDescData = protoimpl.X.CompressGZIP(file_test_grpc_testing_test_proto_rawDescData) + }) + return file_test_grpc_testing_test_proto_rawDescData +} + +var file_test_grpc_testing_test_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_test_grpc_testing_test_proto_msgTypes = make([]protoimpl.MessageInfo, 9) +var file_test_grpc_testing_test_proto_goTypes = []interface{}{ + (PayloadType)(0), // 0: grpc.testing.PayloadType + (*Empty)(nil), // 1: grpc.testing.Empty + (*Payload)(nil), // 2: grpc.testing.Payload + (*SimpleRequest)(nil), // 3: grpc.testing.SimpleRequest + (*SimpleResponse)(nil), // 4: grpc.testing.SimpleResponse + (*StreamingInputCallRequest)(nil), // 5: grpc.testing.StreamingInputCallRequest + (*StreamingInputCallResponse)(nil), // 6: grpc.testing.StreamingInputCallResponse + (*ResponseParameters)(nil), // 7: grpc.testing.ResponseParameters + (*StreamingOutputCallRequest)(nil), // 8: grpc.testing.StreamingOutputCallRequest + (*StreamingOutputCallResponse)(nil), // 9: grpc.testing.StreamingOutputCallResponse +} +var file_test_grpc_testing_test_proto_depIdxs = []int32{ + 0, // 0: grpc.testing.Payload.type:type_name -> grpc.testing.PayloadType + 0, // 1: grpc.testing.SimpleRequest.response_type:type_name -> grpc.testing.PayloadType + 2, // 2: grpc.testing.SimpleRequest.payload:type_name -> grpc.testing.Payload + 2, // 3: grpc.testing.SimpleResponse.payload:type_name -> grpc.testing.Payload + 2, // 4: 
grpc.testing.StreamingInputCallRequest.payload:type_name -> grpc.testing.Payload + 0, // 5: grpc.testing.StreamingOutputCallRequest.response_type:type_name -> grpc.testing.PayloadType + 7, // 6: grpc.testing.StreamingOutputCallRequest.response_parameters:type_name -> grpc.testing.ResponseParameters + 2, // 7: grpc.testing.StreamingOutputCallRequest.payload:type_name -> grpc.testing.Payload + 2, // 8: grpc.testing.StreamingOutputCallResponse.payload:type_name -> grpc.testing.Payload + 1, // 9: grpc.testing.TestService.EmptyCall:input_type -> grpc.testing.Empty + 3, // 10: grpc.testing.TestService.UnaryCall:input_type -> grpc.testing.SimpleRequest + 8, // 11: grpc.testing.TestService.StreamingOutputCall:input_type -> grpc.testing.StreamingOutputCallRequest + 5, // 12: grpc.testing.TestService.StreamingInputCall:input_type -> grpc.testing.StreamingInputCallRequest + 8, // 13: grpc.testing.TestService.FullDuplexCall:input_type -> grpc.testing.StreamingOutputCallRequest + 8, // 14: grpc.testing.TestService.HalfDuplexCall:input_type -> grpc.testing.StreamingOutputCallRequest + 1, // 15: grpc.testing.TestService.EmptyCall:output_type -> grpc.testing.Empty + 4, // 16: grpc.testing.TestService.UnaryCall:output_type -> grpc.testing.SimpleResponse + 9, // 17: grpc.testing.TestService.StreamingOutputCall:output_type -> grpc.testing.StreamingOutputCallResponse + 6, // 18: grpc.testing.TestService.StreamingInputCall:output_type -> grpc.testing.StreamingInputCallResponse + 9, // 19: grpc.testing.TestService.FullDuplexCall:output_type -> grpc.testing.StreamingOutputCallResponse + 9, // 20: grpc.testing.TestService.HalfDuplexCall:output_type -> grpc.testing.StreamingOutputCallResponse + 15, // [15:21] is the sub-list for method output_type + 9, // [9:15] is the sub-list for method input_type + 9, // [9:9] is the sub-list for extension type_name + 9, // [9:9] is the sub-list for extension extendee + 0, // [0:9] is the sub-list for field type_name +} + +func init() { file_test_grpc_testing_test_proto_init() } +func file_test_grpc_testing_test_proto_init() { + if File_test_grpc_testing_test_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_test_grpc_testing_test_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Empty); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_test_grpc_testing_test_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Payload); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_test_grpc_testing_test_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SimpleRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_test_grpc_testing_test_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SimpleResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_test_grpc_testing_test_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*StreamingInputCallRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_test_grpc_testing_test_proto_msgTypes[5].Exporter 
= func(v interface{}, i int) interface{} { + switch v := v.(*StreamingInputCallResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_test_grpc_testing_test_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ResponseParameters); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_test_grpc_testing_test_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*StreamingOutputCallRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_test_grpc_testing_test_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*StreamingOutputCallResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_test_grpc_testing_test_proto_rawDesc, + NumEnums: 1, + NumMessages: 9, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_test_grpc_testing_test_proto_goTypes, + DependencyIndexes: file_test_grpc_testing_test_proto_depIdxs, + EnumInfos: file_test_grpc_testing_test_proto_enumTypes, + MessageInfos: file_test_grpc_testing_test_proto_msgTypes, + }.Build() + File_test_grpc_testing_test_proto = out.File + file_test_grpc_testing_test_proto_rawDesc = nil + file_test_grpc_testing_test_proto_goTypes = nil + file_test_grpc_testing_test_proto_depIdxs = nil } diff --git a/vendor/google.golang.org/grpc/test/grpc_testing/test_grpc.pb.go b/vendor/google.golang.org/grpc/test/grpc_testing/test_grpc.pb.go index 2340ac05e89..ab3b68a92bc 100644 --- a/vendor/google.golang.org/grpc/test/grpc_testing/test_grpc.pb.go +++ b/vendor/google.golang.org/grpc/test/grpc_testing/test_grpc.pb.go @@ -11,7 +11,8 @@ import ( // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion6 +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 // TestServiceClient is the client API for TestService service. // @@ -66,7 +67,7 @@ func (c *testServiceClient) UnaryCall(ctx context.Context, in *SimpleRequest, op } func (c *testServiceClient) StreamingOutputCall(ctx context.Context, in *StreamingOutputCallRequest, opts ...grpc.CallOption) (TestService_StreamingOutputCallClient, error) { - stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[0], "/grpc.testing.TestService/StreamingOutputCall", opts...) + stream, err := c.cc.NewStream(ctx, &TestService_ServiceDesc.Streams[0], "/grpc.testing.TestService/StreamingOutputCall", opts...) if err != nil { return nil, err } @@ -98,7 +99,7 @@ func (x *testServiceStreamingOutputCallClient) Recv() (*StreamingOutputCallRespo } func (c *testServiceClient) StreamingInputCall(ctx context.Context, opts ...grpc.CallOption) (TestService_StreamingInputCallClient, error) { - stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[1], "/grpc.testing.TestService/StreamingInputCall", opts...) + stream, err := c.cc.NewStream(ctx, &TestService_ServiceDesc.Streams[1], "/grpc.testing.TestService/StreamingInputCall", opts...) 
if err != nil { return nil, err } @@ -132,7 +133,7 @@ func (x *testServiceStreamingInputCallClient) CloseAndRecv() (*StreamingInputCal } func (c *testServiceClient) FullDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_FullDuplexCallClient, error) { - stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[2], "/grpc.testing.TestService/FullDuplexCall", opts...) + stream, err := c.cc.NewStream(ctx, &TestService_ServiceDesc.Streams[2], "/grpc.testing.TestService/FullDuplexCall", opts...) if err != nil { return nil, err } @@ -163,7 +164,7 @@ func (x *testServiceFullDuplexCallClient) Recv() (*StreamingOutputCallResponse, } func (c *testServiceClient) HalfDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_HalfDuplexCallClient, error) { - stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[3], "/grpc.testing.TestService/HalfDuplexCall", opts...) + stream, err := c.cc.NewStream(ctx, &TestService_ServiceDesc.Streams[3], "/grpc.testing.TestService/HalfDuplexCall", opts...) if err != nil { return nil, err } @@ -194,7 +195,7 @@ func (x *testServiceHalfDuplexCallClient) Recv() (*StreamingOutputCallResponse, } // TestServiceServer is the server API for TestService service. -// All implementations should embed UnimplementedTestServiceServer +// All implementations must embed UnimplementedTestServiceServer // for forward compatibility type TestServiceServer interface { // One empty request followed by one empty response. @@ -217,33 +218,42 @@ type TestServiceServer interface { // stream of responses are returned to the client when the server starts with // first request. HalfDuplexCall(TestService_HalfDuplexCallServer) error + mustEmbedUnimplementedTestServiceServer() } -// UnimplementedTestServiceServer should be embedded to have forward compatible implementations. +// UnimplementedTestServiceServer must be embedded to have forward compatible implementations. 
type UnimplementedTestServiceServer struct { } -func (*UnimplementedTestServiceServer) EmptyCall(context.Context, *Empty) (*Empty, error) { +func (UnimplementedTestServiceServer) EmptyCall(context.Context, *Empty) (*Empty, error) { return nil, status.Errorf(codes.Unimplemented, "method EmptyCall not implemented") } -func (*UnimplementedTestServiceServer) UnaryCall(context.Context, *SimpleRequest) (*SimpleResponse, error) { +func (UnimplementedTestServiceServer) UnaryCall(context.Context, *SimpleRequest) (*SimpleResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method UnaryCall not implemented") } -func (*UnimplementedTestServiceServer) StreamingOutputCall(*StreamingOutputCallRequest, TestService_StreamingOutputCallServer) error { +func (UnimplementedTestServiceServer) StreamingOutputCall(*StreamingOutputCallRequest, TestService_StreamingOutputCallServer) error { return status.Errorf(codes.Unimplemented, "method StreamingOutputCall not implemented") } -func (*UnimplementedTestServiceServer) StreamingInputCall(TestService_StreamingInputCallServer) error { +func (UnimplementedTestServiceServer) StreamingInputCall(TestService_StreamingInputCallServer) error { return status.Errorf(codes.Unimplemented, "method StreamingInputCall not implemented") } -func (*UnimplementedTestServiceServer) FullDuplexCall(TestService_FullDuplexCallServer) error { +func (UnimplementedTestServiceServer) FullDuplexCall(TestService_FullDuplexCallServer) error { return status.Errorf(codes.Unimplemented, "method FullDuplexCall not implemented") } -func (*UnimplementedTestServiceServer) HalfDuplexCall(TestService_HalfDuplexCallServer) error { +func (UnimplementedTestServiceServer) HalfDuplexCall(TestService_HalfDuplexCallServer) error { return status.Errorf(codes.Unimplemented, "method HalfDuplexCall not implemented") } +func (UnimplementedTestServiceServer) mustEmbedUnimplementedTestServiceServer() {} -func RegisterTestServiceServer(s *grpc.Server, srv TestServiceServer) { - s.RegisterService(&_TestService_serviceDesc, srv) +// UnsafeTestServiceServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to TestServiceServer will +// result in compilation errors. +type UnsafeTestServiceServer interface { + mustEmbedUnimplementedTestServiceServer() +} + +func RegisterTestServiceServer(s grpc.ServiceRegistrar, srv TestServiceServer) { + s.RegisterService(&TestService_ServiceDesc, srv) } func _TestService_EmptyCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { @@ -381,7 +391,10 @@ func (x *testServiceHalfDuplexCallServer) Recv() (*StreamingOutputCallRequest, e return m, nil } -var _TestService_serviceDesc = grpc.ServiceDesc{ +// TestService_ServiceDesc is the grpc.ServiceDesc for TestService service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var TestService_ServiceDesc = grpc.ServiceDesc{ ServiceName: "grpc.testing.TestService", HandlerType: (*TestServiceServer)(nil), Methods: []grpc.MethodDesc{ diff --git a/vendor/google.golang.org/grpc/version.go b/vendor/google.golang.org/grpc/version.go index de387f7b86a..1051b7eff99 100644 --- a/vendor/google.golang.org/grpc/version.go +++ b/vendor/google.golang.org/grpc/version.go @@ -19,4 +19,4 @@ package grpc // Version is the current grpc version. 
-const Version = "1.31.1" +const Version = "1.36.1" diff --git a/vendor/google.golang.org/grpc/vet.sh b/vendor/google.golang.org/grpc/vet.sh index 8b7dff19adb..b41df6dc860 100644 --- a/vendor/google.golang.org/grpc/vet.sh +++ b/vendor/google.golang.org/grpc/vet.sh @@ -53,13 +53,21 @@ if [[ "$1" = "-install" ]]; then fi if [[ -z "${VET_SKIP_PROTO}" ]]; then if [[ "${TRAVIS}" = "true" ]]; then - PROTOBUF_VERSION=3.3.0 + PROTOBUF_VERSION=3.14.0 PROTOC_FILENAME=protoc-${PROTOBUF_VERSION}-linux-x86_64.zip pushd /home/travis wget https://github.com/google/protobuf/releases/download/v${PROTOBUF_VERSION}/${PROTOC_FILENAME} unzip ${PROTOC_FILENAME} bin/protoc --version popd + elif [[ "${GITHUB_ACTIONS}" = "true" ]]; then + PROTOBUF_VERSION=3.14.0 + PROTOC_FILENAME=protoc-${PROTOBUF_VERSION}-linux-x86_64.zip + pushd /home/runner/go + wget https://github.com/google/protobuf/releases/download/v${PROTOBUF_VERSION}/${PROTOC_FILENAME} + unzip ${PROTOC_FILENAME} + bin/protoc --version + popd elif not which protoc > /dev/null; then die "Please install protoc into your path" fi @@ -83,11 +91,14 @@ not git grep -l 'x/net/context' -- "*.go" # thread safety. git grep -l '"math/rand"' -- "*.go" 2>&1 | not grep -v '^examples\|^stress\|grpcrand\|^benchmark\|wrr_test' +# - Do not call grpclog directly. Use grpclog.Component instead. +git grep -l 'grpclog.I\|grpclog.W\|grpclog.E\|grpclog.F\|grpclog.V' -- "*.go" | not grep -v '^grpclog/component.go\|^internal/grpctest/tlogger_test.go' + # - Ensure all ptypes proto packages are renamed when importing. not git grep "\(import \|^\s*\)\"github.com/golang/protobuf/ptypes/" -- "*.go" # - Ensure all xds proto imports are renamed to *pb or *grpc. -git grep '"github.com/envoyproxy/go-control-plane/envoy' -- '*.go' | not grep -v 'pb "\|grpc "' +git grep '"github.com/envoyproxy/go-control-plane/envoy' -- '*.go' ':(exclude)*.pb.go' | not grep -v 'pb "\|grpc "' # - Check imports that are illegal in appengine (until Go 1.11). # TODO: Remove when we drop Go 1.10 support @@ -95,8 +106,8 @@ go list -f {{.Dir}} ./... | xargs go run test/go_vet/vet.go # - gofmt, goimports, golint (with exceptions for generated code), go vet. gofmt -s -d -l . 2>&1 | fail_on_output -goimports -l . 2>&1 | not grep -vE "(_mock|\.pb)\.go" -golint ./... 2>&1 | not grep -vE "(_mock|\.pb)\.go:" +goimports -l . 2>&1 | not grep -vE "\.pb\.go" +golint ./... 2>&1 | not grep -vE "\.pb\.go:" go vet -all ./... misspell -error . @@ -154,7 +165,26 @@ grpc.WithTimeout http.CloseNotifier info.SecurityVersion resolver.Backend -resolver.GRPCLB' "${SC_OUT}" +resolver.GRPCLB +extDesc.Filename is deprecated +BuildVersion is deprecated +github.com/golang/protobuf/jsonpb is deprecated +proto is deprecated +xxx_messageInfo_ +proto.InternalMessageInfo is deprecated +proto.EnumName is deprecated +proto.ErrInternalBadWireType is deprecated +proto.FileDescriptor is deprecated +proto.Marshaler is deprecated +proto.MessageType is deprecated +proto.RegisterEnum is deprecated +proto.RegisterFile is deprecated +proto.RegisterType is deprecated +proto.RegisterExtension is deprecated +proto.RegisteredExtension is deprecated +proto.RegisteredExtensions is deprecated +proto.RegisterMapType is deprecated +proto.Unmarshaler is deprecated' "${SC_OUT}" # - special golint on package comments. 
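
The regenerated test_grpc.pb.go above follows the newer protoc-gen-go-grpc conventions: the service descriptor is exported as TestService_ServiceDesc, RegisterTestServiceServer now takes the grpc.ServiceRegistrar interface, and server implementations must embed UnimplementedTestServiceServer because the interface gained the unexported mustEmbedUnimplementedTestServiceServer method. A minimal sketch of a server written against the regenerated stubs (the listener address and the trivial EmptyCall body are made up for illustration):

    package main

    import (
        "context"
        "log"
        "net"

        "google.golang.org/grpc"
        grpctesting "google.golang.org/grpc/test/grpc_testing"
    )

    // testServer embeds UnimplementedTestServiceServer, which satisfies the new
    // mustEmbedUnimplementedTestServiceServer requirement; any method it does not
    // override returns an Unimplemented error.
    type testServer struct {
        grpctesting.UnimplementedTestServiceServer
    }

    func (s *testServer) EmptyCall(ctx context.Context, _ *grpctesting.Empty) (*grpctesting.Empty, error) {
        return &grpctesting.Empty{}, nil
    }

    func main() {
        lis, err := net.Listen("tcp", "127.0.0.1:0")
        if err != nil {
            log.Fatal(err)
        }
        srv := grpc.NewServer()
        // RegisterTestServiceServer now accepts any grpc.ServiceRegistrar
        // (satisfied by *grpc.Server) and registers the exported
        // TestService_ServiceDesc under the hood.
        grpctesting.RegisterTestServiceServer(srv, &testServer{})
        log.Printf("test service listening on %s", lis.Addr())
        if err := srv.Serve(lis); err != nil {
            log.Fatal(err)
        }
    }
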
lint_package_comment_per_package() { diff --git a/vendor/google.golang.org/protobuf/encoding/protojson/decode.go b/vendor/google.golang.org/protobuf/encoding/protojson/decode.go index 2a6077590bc..9bf4e8c1763 100644 --- a/vendor/google.golang.org/protobuf/encoding/protojson/decode.go +++ b/vendor/google.golang.org/protobuf/encoding/protojson/decode.go @@ -15,6 +15,7 @@ import ( "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/errors" "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/internal/set" "google.golang.org/protobuf/proto" @@ -111,8 +112,8 @@ func (d decoder) syntaxError(pos int, f string, x ...interface{}) error { // unmarshalMessage unmarshals a message into the given protoreflect.Message. func (d decoder) unmarshalMessage(m pref.Message, skipTypeURL bool) error { - if isCustomType(m.Descriptor().FullName()) { - return d.unmarshalCustomType(m) + if unmarshal := wellKnownTypeUnmarshaler(m.Descriptor().FullName()); unmarshal != nil { + return unmarshal(d, m) } tok, err := d.Read() @@ -267,12 +268,12 @@ func (d decoder) findExtension(xtName pref.FullName) (pref.ExtensionType, error) func isKnownValue(fd pref.FieldDescriptor) bool { md := fd.Message() - return md != nil && md.FullName() == "google.protobuf.Value" + return md != nil && md.FullName() == genid.Value_message_fullname } func isNullValue(fd pref.FieldDescriptor) bool { ed := fd.Enum() - return ed != nil && ed.FullName() == "google.protobuf.NullValue" + return ed != nil && ed.FullName() == genid.NullValue_enum_fullname } // unmarshalSingular unmarshals to the non-repeated field specified diff --git a/vendor/google.golang.org/protobuf/encoding/protojson/encode.go b/vendor/google.golang.org/protobuf/encoding/protojson/encode.go index 73b750a1926..7d619330081 100644 --- a/vendor/google.golang.org/protobuf/encoding/protojson/encode.go +++ b/vendor/google.golang.org/protobuf/encoding/protojson/encode.go @@ -13,6 +13,7 @@ import ( "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/errors" "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/proto" pref "google.golang.org/protobuf/reflect/protoreflect" @@ -146,8 +147,8 @@ type encoder struct { // marshalMessage marshals the given protoreflect.Message. 
func (e encoder) marshalMessage(m pref.Message) error { - if isCustomType(m.Descriptor().FullName()) { - return e.marshalCustomType(m) + if marshal := wellKnownTypeMarshaler(m.Descriptor().FullName()); marshal != nil { + return marshal(e, m) } e.StartObject() @@ -268,7 +269,7 @@ func (e encoder) marshalSingular(val pref.Value, fd pref.FieldDescriptor) error e.WriteString(base64.StdEncoding.EncodeToString(val.Bytes())) case pref.EnumKind: - if fd.Enum().FullName() == "google.protobuf.NullValue" { + if fd.Enum().FullName() == genid.NullValue_enum_fullname { e.WriteNull() } else { desc := fd.Enum().Values().ByNumber(val.Enum()) diff --git a/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go b/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go index 3c3ef14da32..def7377c78b 100644 --- a/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go +++ b/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go @@ -11,81 +11,88 @@ import ( "strings" "time" - "google.golang.org/protobuf/internal/detectknown" "google.golang.org/protobuf/internal/encoding/json" "google.golang.org/protobuf/internal/errors" - "google.golang.org/protobuf/internal/fieldnum" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/strs" "google.golang.org/protobuf/proto" pref "google.golang.org/protobuf/reflect/protoreflect" ) -// isCustomType returns true if type name has special JSON conversion rules. -// The list of custom types here has to match the ones in marshalCustomType and -// unmarshalCustomType. -func isCustomType(name pref.FullName) bool { - switch detectknown.Which(name) { - case detectknown.AnyProto: - case detectknown.TimestampProto: - case detectknown.DurationProto: - case detectknown.WrappersProto: - case detectknown.StructProto: - case detectknown.FieldMaskProto: - case detectknown.EmptyProto: - default: - return false - } - return true -} - -// marshalCustomType marshals given well-known type message that have special -// JSON conversion rules. It needs to be a message type where isCustomType -// returns true, else it will panic. -func (e encoder) marshalCustomType(m pref.Message) error { - name := m.Descriptor().FullName() - switch detectknown.Which(name) { - case detectknown.AnyProto: - return e.marshalAny(m) - case detectknown.TimestampProto: - return e.marshalTimestamp(m) - case detectknown.DurationProto: - return e.marshalDuration(m) - case detectknown.WrappersProto: - return e.marshalWrapperType(m) - case detectknown.StructProto: - return e.marshalStructType(m) - case detectknown.FieldMaskProto: - return e.marshalFieldMask(m) - case detectknown.EmptyProto: - return e.marshalEmpty(m) - default: - panic(fmt.Sprintf("%s does not have a custom marshaler", name)) +type marshalFunc func(encoder, pref.Message) error + +// wellKnownTypeMarshaler returns a marshal function if the message type +// has specialized serialization behavior. It returns nil otherwise. 
+func wellKnownTypeMarshaler(name pref.FullName) marshalFunc { + if name.Parent() == genid.GoogleProtobuf_package { + switch name.Name() { + case genid.Any_message_name: + return encoder.marshalAny + case genid.Timestamp_message_name: + return encoder.marshalTimestamp + case genid.Duration_message_name: + return encoder.marshalDuration + case genid.BoolValue_message_name, + genid.Int32Value_message_name, + genid.Int64Value_message_name, + genid.UInt32Value_message_name, + genid.UInt64Value_message_name, + genid.FloatValue_message_name, + genid.DoubleValue_message_name, + genid.StringValue_message_name, + genid.BytesValue_message_name: + return encoder.marshalWrapperType + case genid.Struct_message_name: + return encoder.marshalStruct + case genid.ListValue_message_name: + return encoder.marshalListValue + case genid.Value_message_name: + return encoder.marshalKnownValue + case genid.FieldMask_message_name: + return encoder.marshalFieldMask + case genid.Empty_message_name: + return encoder.marshalEmpty + } } + return nil } -// unmarshalCustomType unmarshals given well-known type message that have -// special JSON conversion rules. It needs to be a message type where -// isCustomType returns true, else it will panic. -func (d decoder) unmarshalCustomType(m pref.Message) error { - name := m.Descriptor().FullName() - switch detectknown.Which(name) { - case detectknown.AnyProto: - return d.unmarshalAny(m) - case detectknown.TimestampProto: - return d.unmarshalTimestamp(m) - case detectknown.DurationProto: - return d.unmarshalDuration(m) - case detectknown.WrappersProto: - return d.unmarshalWrapperType(m) - case detectknown.StructProto: - return d.unmarshalStructType(m) - case detectknown.FieldMaskProto: - return d.unmarshalFieldMask(m) - case detectknown.EmptyProto: - return d.unmarshalEmpty(m) - default: - panic(fmt.Sprintf("%s does not have a custom unmarshaler", name)) +type unmarshalFunc func(decoder, pref.Message) error + +// wellKnownTypeUnmarshaler returns a unmarshal function if the message type +// has specialized serialization behavior. It returns nil otherwise. 
+func wellKnownTypeUnmarshaler(name pref.FullName) unmarshalFunc { + if name.Parent() == genid.GoogleProtobuf_package { + switch name.Name() { + case genid.Any_message_name: + return decoder.unmarshalAny + case genid.Timestamp_message_name: + return decoder.unmarshalTimestamp + case genid.Duration_message_name: + return decoder.unmarshalDuration + case genid.BoolValue_message_name, + genid.Int32Value_message_name, + genid.Int64Value_message_name, + genid.UInt32Value_message_name, + genid.UInt64Value_message_name, + genid.FloatValue_message_name, + genid.DoubleValue_message_name, + genid.StringValue_message_name, + genid.BytesValue_message_name: + return decoder.unmarshalWrapperType + case genid.Struct_message_name: + return decoder.unmarshalStruct + case genid.ListValue_message_name: + return decoder.unmarshalListValue + case genid.Value_message_name: + return decoder.unmarshalKnownValue + case genid.FieldMask_message_name: + return decoder.unmarshalFieldMask + case genid.Empty_message_name: + return decoder.unmarshalEmpty + } } + return nil } // The JSON representation of an Any message uses the regular representation of @@ -96,8 +103,8 @@ func (d decoder) unmarshalCustomType(m pref.Message) error { func (e encoder) marshalAny(m pref.Message) error { fds := m.Descriptor().Fields() - fdType := fds.ByNumber(fieldnum.Any_TypeUrl) - fdValue := fds.ByNumber(fieldnum.Any_Value) + fdType := fds.ByNumber(genid.Any_TypeUrl_field_number) + fdValue := fds.ByNumber(genid.Any_Value_field_number) // Start writing the JSON object. e.StartObject() @@ -109,7 +116,7 @@ func (e encoder) marshalAny(m pref.Message) error { return nil } else { // Return error if type_url field is not set, but value is set. - return errors.New("%s: type_url is not set", m.Descriptor().FullName()) + return errors.New("%s: %v is not set", genid.Any_message_fullname, genid.Any_TypeUrl_field_name) } } @@ -126,7 +133,7 @@ func (e encoder) marshalAny(m pref.Message) error { // Resolve the type in order to unmarshal value field. emt, err := e.opts.Resolver.FindMessageByURL(typeURL) if err != nil { - return errors.New("%s: unable to resolve %q: %v", m.Descriptor().FullName(), typeURL, err) + return errors.New("%s: unable to resolve %q: %v", genid.Any_message_fullname, typeURL, err) } em := emt.New() @@ -135,15 +142,15 @@ func (e encoder) marshalAny(m pref.Message) error { Resolver: e.opts.Resolver, }.Unmarshal(valueVal.Bytes(), em.Interface()) if err != nil { - return errors.New("%s: unable to unmarshal %q: %v", m.Descriptor().FullName(), typeURL, err) + return errors.New("%s: unable to unmarshal %q: %v", genid.Any_message_fullname, typeURL, err) } // If type of value has custom JSON encoding, marshal out a field "value" // with corresponding custom JSON encoding of the embedded message as a // field. - if isCustomType(emt.Descriptor().FullName()) { + if marshal := wellKnownTypeMarshaler(emt.Descriptor().FullName()); marshal != nil { e.WriteName("value") - return e.marshalCustomType(em) + return marshal(e, em) } // Else, marshal out the embedded message's fields in this Any object. @@ -198,10 +205,10 @@ func (d decoder) unmarshalAny(m pref.Message) error { // Create new message for the embedded message type and unmarshal into it. em := emt.New() - if isCustomType(emt.Descriptor().FullName()) { + if unmarshal := wellKnownTypeUnmarshaler(emt.Descriptor().FullName()); unmarshal != nil { // If embedded message is a custom type, // unmarshal the JSON "value" field into it. 
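The new wellKnownTypeMarshaler/wellKnownTypeUnmarshaler lookups replace the old detectknown-based switches with per-type marshal and unmarshal functions keyed by genid names; the externally visible protojson behavior stays the same, with well-known types keeping their dedicated JSON forms. A small illustration of that behavior (not part of the patch), assuming the durationpb and structpb helper packages vendored by this update:

package main

import (
	"fmt"
	"log"
	"time"

	"google.golang.org/protobuf/encoding/protojson"
	"google.golang.org/protobuf/types/known/durationpb"
	"google.golang.org/protobuf/types/known/structpb"
)

func main() {
	// google.protobuf.Duration marshals to a string such as "90s",
	// not to an object with seconds/nanos fields.
	d, err := protojson.Marshal(durationpb.New(90 * time.Second))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(d)) // "90s"

	// google.protobuf.Value marshals to the corresponding JSON scalar.
	v, err := protojson.Marshal(structpb.NewStringValue("hello"))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(v)) // "hello"
}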
- if err := d.unmarshalAnyValue(em); err != nil { + if err := d.unmarshalAnyValue(unmarshal, em); err != nil { return err } } else { @@ -221,8 +228,8 @@ func (d decoder) unmarshalAny(m pref.Message) error { } fds := m.Descriptor().Fields() - fdType := fds.ByNumber(fieldnum.Any_TypeUrl) - fdValue := fds.ByNumber(fieldnum.Any_Value) + fdType := fds.ByNumber(genid.Any_TypeUrl_field_number) + fdValue := fds.ByNumber(genid.Any_Value_field_number) m.Set(fdType, pref.ValueOfString(typeURL)) m.Set(fdValue, pref.ValueOfBytes(b)) @@ -345,7 +352,7 @@ func (d decoder) skipJSONValue() error { // unmarshalAnyValue unmarshals the given custom-type message from the JSON // object's "value" field. -func (d decoder) unmarshalAnyValue(m pref.Message) error { +func (d decoder) unmarshalAnyValue(unmarshal unmarshalFunc, m pref.Message) error { // Skip ObjectOpen, and start reading the fields. d.Read() @@ -373,7 +380,7 @@ func (d decoder) unmarshalAnyValue(m pref.Message) error { return d.newError(tok.Pos(), `duplicate "value" field`) } // Unmarshal the field value into the given message. - if err := d.unmarshalCustomType(m); err != nil { + if err := unmarshal(d, m); err != nil { return err } found = true @@ -393,17 +400,14 @@ func (d decoder) unmarshalAnyValue(m pref.Message) error { // Wrapper types are encoded as JSON primitives like string, number or boolean. -// The "value" field has the same field number for all wrapper types. -const wrapperFieldNumber = fieldnum.BoolValue_Value - func (e encoder) marshalWrapperType(m pref.Message) error { - fd := m.Descriptor().Fields().ByNumber(wrapperFieldNumber) + fd := m.Descriptor().Fields().ByNumber(genid.WrapperValue_Value_field_number) val := m.Get(fd) return e.marshalSingular(val, fd) } func (d decoder) unmarshalWrapperType(m pref.Message) error { - fd := m.Descriptor().Fields().ByNumber(wrapperFieldNumber) + fd := m.Descriptor().Fields().ByNumber(genid.WrapperValue_Value_field_number) val, err := d.unmarshalScalar(fd) if err != nil { return err @@ -453,42 +457,16 @@ func (d decoder) unmarshalEmpty(pref.Message) error { } } -func (e encoder) marshalStructType(m pref.Message) error { - switch m.Descriptor().Name() { - case "Struct": - return e.marshalStruct(m) - case "ListValue": - return e.marshalListValue(m) - case "Value": - return e.marshalKnownValue(m) - default: - panic(fmt.Sprintf("invalid struct type: %v", m.Descriptor().FullName())) - } -} - -func (d decoder) unmarshalStructType(m pref.Message) error { - switch m.Descriptor().Name() { - case "Struct": - return d.unmarshalStruct(m) - case "ListValue": - return d.unmarshalListValue(m) - case "Value": - return d.unmarshalKnownValue(m) - default: - panic(fmt.Sprintf("invalid struct type: %v", m.Descriptor().FullName())) - } -} - // The JSON representation for Struct is a JSON object that contains the encoded // Struct.fields map and follows the serialization rules for a map. func (e encoder) marshalStruct(m pref.Message) error { - fd := m.Descriptor().Fields().ByNumber(fieldnum.Struct_Fields) + fd := m.Descriptor().Fields().ByNumber(genid.Struct_Fields_field_number) return e.marshalMap(m.Get(fd).Map(), fd) } func (d decoder) unmarshalStruct(m pref.Message) error { - fd := m.Descriptor().Fields().ByNumber(fieldnum.Struct_Fields) + fd := m.Descriptor().Fields().ByNumber(genid.Struct_Fields_field_number) return d.unmarshalMap(m.Mutable(fd).Map(), fd) } @@ -497,12 +475,12 @@ func (d decoder) unmarshalStruct(m pref.Message) error { // repeated field. 
func (e encoder) marshalListValue(m pref.Message) error { - fd := m.Descriptor().Fields().ByNumber(fieldnum.ListValue_Values) + fd := m.Descriptor().Fields().ByNumber(genid.ListValue_Values_field_number) return e.marshalList(m.Get(fd).List(), fd) } func (d decoder) unmarshalListValue(m pref.Message) error { - fd := m.Descriptor().Fields().ByNumber(fieldnum.ListValue_Values) + fd := m.Descriptor().Fields().ByNumber(genid.ListValue_Values_field_number) return d.unmarshalList(m.Mutable(fd).List(), fd) } @@ -511,10 +489,10 @@ func (d decoder) unmarshalListValue(m pref.Message) error { // Value message needs to be a oneof field set, else it is an error. func (e encoder) marshalKnownValue(m pref.Message) error { - od := m.Descriptor().Oneofs().ByName("kind") + od := m.Descriptor().Oneofs().ByName(genid.Value_Kind_oneof_name) fd := m.WhichOneof(od) if fd == nil { - return errors.New("%s: none of the oneof fields is set", m.Descriptor().FullName()) + return errors.New("%s: none of the oneof fields is set", genid.Value_message_fullname) } return e.marshalSingular(m.Get(fd), fd) } @@ -530,7 +508,7 @@ func (d decoder) unmarshalKnownValue(m pref.Message) error { switch tok.Kind() { case json.Null: d.Read() - fd = m.Descriptor().Fields().ByNumber(fieldnum.Value_NullValue) + fd = m.Descriptor().Fields().ByNumber(genid.Value_NullValue_field_number) val = pref.ValueOfEnum(0) case json.Bool: @@ -538,7 +516,7 @@ func (d decoder) unmarshalKnownValue(m pref.Message) error { if err != nil { return err } - fd = m.Descriptor().Fields().ByNumber(fieldnum.Value_BoolValue) + fd = m.Descriptor().Fields().ByNumber(genid.Value_BoolValue_field_number) val = pref.ValueOfBool(tok.Bool()) case json.Number: @@ -546,11 +524,11 @@ func (d decoder) unmarshalKnownValue(m pref.Message) error { if err != nil { return err } - fd = m.Descriptor().Fields().ByNumber(fieldnum.Value_NumberValue) + fd = m.Descriptor().Fields().ByNumber(genid.Value_NumberValue_field_number) var ok bool val, ok = unmarshalFloat(tok, 64) if !ok { - return d.newError(tok.Pos(), "invalid google.protobuf.Value: %v", tok.RawString()) + return d.newError(tok.Pos(), "invalid %v: %v", genid.Value_message_fullname, tok.RawString()) } case json.String: @@ -564,25 +542,25 @@ func (d decoder) unmarshalKnownValue(m pref.Message) error { if err != nil { return err } - fd = m.Descriptor().Fields().ByNumber(fieldnum.Value_StringValue) + fd = m.Descriptor().Fields().ByNumber(genid.Value_StringValue_field_number) val = pref.ValueOfString(tok.ParsedString()) case json.ObjectOpen: - fd = m.Descriptor().Fields().ByNumber(fieldnum.Value_StructValue) + fd = m.Descriptor().Fields().ByNumber(genid.Value_StructValue_field_number) val = m.NewField(fd) if err := d.unmarshalStruct(val.Message()); err != nil { return err } case json.ArrayOpen: - fd = m.Descriptor().Fields().ByNumber(fieldnum.Value_ListValue) + fd = m.Descriptor().Fields().ByNumber(genid.Value_ListValue_field_number) val = m.NewField(fd) if err := d.unmarshalListValue(val.Message()); err != nil { return err } default: - return d.newError(tok.Pos(), "invalid google.protobuf.Value: %v", tok.RawString()) + return d.newError(tok.Pos(), "invalid %v: %v", genid.Value_message_fullname, tok.RawString()) } m.Set(fd, val) @@ -608,21 +586,21 @@ const ( func (e encoder) marshalDuration(m pref.Message) error { fds := m.Descriptor().Fields() - fdSeconds := fds.ByNumber(fieldnum.Duration_Seconds) - fdNanos := fds.ByNumber(fieldnum.Duration_Nanos) + fdSeconds := fds.ByNumber(genid.Duration_Seconds_field_number) + fdNanos := 
fds.ByNumber(genid.Duration_Nanos_field_number) secsVal := m.Get(fdSeconds) nanosVal := m.Get(fdNanos) secs := secsVal.Int() nanos := nanosVal.Int() if secs < -maxSecondsInDuration || secs > maxSecondsInDuration { - return errors.New("%s: seconds out of range %v", m.Descriptor().FullName(), secs) + return errors.New("%s: seconds out of range %v", genid.Duration_message_fullname, secs) } if nanos < -secondsInNanos || nanos > secondsInNanos { - return errors.New("%s: nanos out of range %v", m.Descriptor().FullName(), nanos) + return errors.New("%s: nanos out of range %v", genid.Duration_message_fullname, nanos) } if (secs > 0 && nanos < 0) || (secs < 0 && nanos > 0) { - return errors.New("%s: signs of seconds and nanos do not match", m.Descriptor().FullName()) + return errors.New("%s: signs of seconds and nanos do not match", genid.Duration_message_fullname) } // Generated output always contains 0, 3, 6, or 9 fractional digits, // depending on required precision, followed by the suffix "s". @@ -652,17 +630,17 @@ func (d decoder) unmarshalDuration(m pref.Message) error { secs, nanos, ok := parseDuration(tok.ParsedString()) if !ok { - return d.newError(tok.Pos(), "invalid google.protobuf.Duration value %v", tok.RawString()) + return d.newError(tok.Pos(), "invalid %v value %v", genid.Duration_message_fullname, tok.RawString()) } // Validate seconds. No need to validate nanos because parseDuration would // have covered that already. if secs < -maxSecondsInDuration || secs > maxSecondsInDuration { - return d.newError(tok.Pos(), "google.protobuf.Duration value out of range: %v", tok.RawString()) + return d.newError(tok.Pos(), "%v value out of range: %v", genid.Duration_message_fullname, tok.RawString()) } fds := m.Descriptor().Fields() - fdSeconds := fds.ByNumber(fieldnum.Duration_Seconds) - fdNanos := fds.ByNumber(fieldnum.Duration_Nanos) + fdSeconds := fds.ByNumber(genid.Duration_Seconds_field_number) + fdNanos := fds.ByNumber(genid.Duration_Nanos_field_number) m.Set(fdSeconds, pref.ValueOfInt64(secs)) m.Set(fdNanos, pref.ValueOfInt32(nanos)) @@ -799,18 +777,18 @@ const ( func (e encoder) marshalTimestamp(m pref.Message) error { fds := m.Descriptor().Fields() - fdSeconds := fds.ByNumber(fieldnum.Timestamp_Seconds) - fdNanos := fds.ByNumber(fieldnum.Timestamp_Nanos) + fdSeconds := fds.ByNumber(genid.Timestamp_Seconds_field_number) + fdNanos := fds.ByNumber(genid.Timestamp_Nanos_field_number) secsVal := m.Get(fdSeconds) nanosVal := m.Get(fdNanos) secs := secsVal.Int() nanos := nanosVal.Int() if secs < minTimestampSeconds || secs > maxTimestampSeconds { - return errors.New("%s: seconds out of range %v", m.Descriptor().FullName(), secs) + return errors.New("%s: seconds out of range %v", genid.Timestamp_message_fullname, secs) } if nanos < 0 || nanos > secondsInNanos { - return errors.New("%s: nanos out of range %v", m.Descriptor().FullName(), nanos) + return errors.New("%s: nanos out of range %v", genid.Timestamp_message_fullname, nanos) } // Uses RFC 3339, where generated output will be Z-normalized and uses 0, 3, // 6 or 9 fractional digits. @@ -834,18 +812,18 @@ func (d decoder) unmarshalTimestamp(m pref.Message) error { t, err := time.Parse(time.RFC3339Nano, tok.ParsedString()) if err != nil { - return d.newError(tok.Pos(), "invalid google.protobuf.Timestamp value %v", tok.RawString()) + return d.newError(tok.Pos(), "invalid %v value %v", genid.Timestamp_message_fullname, tok.RawString()) } // Validate seconds. No need to validate nanos because time.Parse would have // covered that already. 
secs := t.Unix() if secs < minTimestampSeconds || secs > maxTimestampSeconds { - return d.newError(tok.Pos(), "google.protobuf.Timestamp value out of range: %v", tok.RawString()) + return d.newError(tok.Pos(), "%v value out of range: %v", genid.Timestamp_message_fullname, tok.RawString()) } fds := m.Descriptor().Fields() - fdSeconds := fds.ByNumber(fieldnum.Timestamp_Seconds) - fdNanos := fds.ByNumber(fieldnum.Timestamp_Nanos) + fdSeconds := fds.ByNumber(genid.Timestamp_Seconds_field_number) + fdNanos := fds.ByNumber(genid.Timestamp_Nanos_field_number) m.Set(fdSeconds, pref.ValueOfInt64(secs)) m.Set(fdNanos, pref.ValueOfInt32(int32(t.Nanosecond()))) @@ -858,16 +836,19 @@ func (d decoder) unmarshalTimestamp(m pref.Message) error { // end up differently after a round-trip. func (e encoder) marshalFieldMask(m pref.Message) error { - fd := m.Descriptor().Fields().ByNumber(fieldnum.FieldMask_Paths) + fd := m.Descriptor().Fields().ByNumber(genid.FieldMask_Paths_field_number) list := m.Get(fd).List() paths := make([]string, 0, list.Len()) for i := 0; i < list.Len(); i++ { s := list.Get(i).String() + if !pref.FullName(s).IsValid() { + return errors.New("%s contains invalid path: %q", genid.FieldMask_Paths_field_fullname, s) + } // Return error if conversion to camelCase is not reversible. cc := strs.JSONCamelCase(s) if s != strs.JSONSnakeCase(cc) { - return errors.New("%s.paths contains irreversible value %q", m.Descriptor().FullName(), s) + return errors.New("%s contains irreversible value %q", genid.FieldMask_Paths_field_fullname, s) } paths = append(paths, cc) } @@ -890,14 +871,15 @@ func (d decoder) unmarshalFieldMask(m pref.Message) error { } paths := strings.Split(str, ",") - fd := m.Descriptor().Fields().ByNumber(fieldnum.FieldMask_Paths) + fd := m.Descriptor().Fields().ByNumber(genid.FieldMask_Paths_field_number) list := m.Mutable(fd).List() - for _, s := range paths { - s = strings.TrimSpace(s) - // Convert to snake_case. Unlike encoding, no validation is done because - // it is not possible to know the original path names. 
- list.Append(pref.ValueOfString(strs.JSONSnakeCase(s))) + for _, s0 := range paths { + s := strs.JSONSnakeCase(s0) + if strings.Contains(s0, "_") || !pref.FullName(s).IsValid() { + return d.newError(tok.Pos(), "%v contains invalid path: %q", genid.FieldMask_Paths_field_fullname, s0) + } + list.Append(pref.ValueOfString(s)) } return nil } diff --git a/vendor/google.golang.org/protobuf/encoding/prototext/decode.go b/vendor/google.golang.org/protobuf/encoding/prototext/decode.go index c2f8f28f2cb..cab95a42735 100644 --- a/vendor/google.golang.org/protobuf/encoding/prototext/decode.go +++ b/vendor/google.golang.org/protobuf/encoding/prototext/decode.go @@ -12,8 +12,8 @@ import ( "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/encoding/text" "google.golang.org/protobuf/internal/errors" - "google.golang.org/protobuf/internal/fieldnum" "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/internal/set" "google.golang.org/protobuf/internal/strs" @@ -108,7 +108,7 @@ func (d decoder) unmarshalMessage(m pref.Message, checkDelims bool) error { return errors.New("no support for proto1 MessageSets") } - if messageDesc.FullName() == "google.protobuf.Any" { + if messageDesc.FullName() == genid.Any_message_fullname { return d.unmarshalAny(m, checkDelims) } @@ -538,14 +538,13 @@ Loop: return d.unexpectedTokenError(tok) } - name := tok.IdentName() - switch name { - case "key": + switch name := pref.Name(tok.IdentName()); name { + case genid.MapEntry_Key_field_name: if !tok.HasSeparator() { return d.syntaxError(tok.Pos(), "missing field separator :") } if key.IsValid() { - return d.newError(tok.Pos(), `map entry "key" cannot be repeated`) + return d.newError(tok.Pos(), "map entry %q cannot be repeated", name) } val, err := d.unmarshalScalar(fd.MapKey()) if err != nil { @@ -553,14 +552,14 @@ Loop: } key = val.MapKey() - case "value": + case genid.MapEntry_Value_field_name: if kind := fd.MapValue().Kind(); (kind != pref.MessageKind) && (kind != pref.GroupKind) { if !tok.HasSeparator() { return d.syntaxError(tok.Pos(), "missing field separator :") } } if pval.IsValid() { - return d.newError(tok.Pos(), `map entry "value" cannot be repeated`) + return d.newError(tok.Pos(), "map entry %q cannot be repeated", name) } pval, err = unmarshalMapValue() if err != nil { @@ -597,13 +596,9 @@ Loop: func (d decoder) unmarshalAny(m pref.Message, checkDelims bool) error { var typeURL string var bValue []byte - - // hasFields tracks which valid fields have been seen in the loop below in - // order to flag an error if there are duplicates or conflicts. It may - // contain the strings "type_url", "value" and "expanded". The literal - // "expanded" is used to indicate that the expanded form has been - // encountered already. 
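Back to the protojson FieldMask changes above: paths are still rendered as a single comma-joined, camelCase JSON string, but this version additionally rejects paths that are not valid field names or whose camelCase form is not reversible. A small round-trip sketch (not part of the patch; assumes the fieldmaskpb helper package vendored alongside these changes):

package main

import (
	"fmt"
	"log"

	"google.golang.org/protobuf/encoding/protojson"
	"google.golang.org/protobuf/types/known/fieldmaskpb"
)

func main() {
	// Each path is converted to camelCase and the list is joined with commas.
	fm := &fieldmaskpb.FieldMask{Paths: []string{"user.display_name", "photo"}}
	b, err := protojson.Marshal(fm)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b)) // "user.displayName,photo"

	// Unmarshaling converts back to snake_case; invalid or irreversible
	// paths now return an error instead of being accepted silently.
	var fm2 fieldmaskpb.FieldMask
	if err := protojson.Unmarshal(b, &fm2); err != nil {
		log.Fatal(err)
	}
	fmt.Println(fm2.GetPaths()) // [user.display_name photo]
}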
- hasFields := map[string]bool{} + var seenTypeUrl bool + var seenValue bool + var isExpanded bool if checkDelims { tok, err := d.Read() @@ -642,12 +637,12 @@ Loop: return d.syntaxError(tok.Pos(), "missing field separator :") } - switch tok.IdentName() { - case "type_url": - if hasFields["type_url"] { - return d.newError(tok.Pos(), "duplicate Any type_url field") + switch name := pref.Name(tok.IdentName()); name { + case genid.Any_TypeUrl_field_name: + if seenTypeUrl { + return d.newError(tok.Pos(), "duplicate %v field", genid.Any_TypeUrl_field_fullname) } - if hasFields["expanded"] { + if isExpanded { return d.newError(tok.Pos(), "conflict with [%s] field", typeURL) } tok, err := d.Read() @@ -657,15 +652,15 @@ Loop: var ok bool typeURL, ok = tok.String() if !ok { - return d.newError(tok.Pos(), "invalid Any type_url: %v", tok.RawString()) + return d.newError(tok.Pos(), "invalid %v field value: %v", genid.Any_TypeUrl_field_fullname, tok.RawString()) } - hasFields["type_url"] = true + seenTypeUrl = true - case "value": - if hasFields["value"] { - return d.newError(tok.Pos(), "duplicate Any value field") + case genid.Any_Value_field_name: + if seenValue { + return d.newError(tok.Pos(), "duplicate %v field", genid.Any_Value_field_fullname) } - if hasFields["expanded"] { + if isExpanded { return d.newError(tok.Pos(), "conflict with [%s] field", typeURL) } tok, err := d.Read() @@ -674,22 +669,22 @@ Loop: } s, ok := tok.String() if !ok { - return d.newError(tok.Pos(), "invalid Any value: %v", tok.RawString()) + return d.newError(tok.Pos(), "invalid %v field value: %v", genid.Any_Value_field_fullname, tok.RawString()) } bValue = []byte(s) - hasFields["value"] = true + seenValue = true default: if !d.opts.DiscardUnknown { - return d.newError(tok.Pos(), "invalid field name %q in google.protobuf.Any message", tok.RawString()) + return d.newError(tok.Pos(), "invalid field name %q in %v message", tok.RawString(), genid.Any_message_fullname) } } case text.TypeName: - if hasFields["expanded"] { + if isExpanded { return d.newError(tok.Pos(), "cannot have more than one type") } - if hasFields["type_url"] { + if seenTypeUrl { return d.newError(tok.Pos(), "conflict with type_url field") } typeURL = tok.TypeName() @@ -698,21 +693,21 @@ Loop: if err != nil { return err } - hasFields["expanded"] = true + isExpanded = true default: if !d.opts.DiscardUnknown { - return d.newError(tok.Pos(), "invalid field name %q in google.protobuf.Any message", tok.RawString()) + return d.newError(tok.Pos(), "invalid field name %q in %v message", tok.RawString(), genid.Any_message_fullname) } } } fds := m.Descriptor().Fields() if len(typeURL) > 0 { - m.Set(fds.ByNumber(fieldnum.Any_TypeUrl), pref.ValueOfString(typeURL)) + m.Set(fds.ByNumber(genid.Any_TypeUrl_field_number), pref.ValueOfString(typeURL)) } if len(bValue) > 0 { - m.Set(fds.ByNumber(fieldnum.Any_Value), pref.ValueOfBytes(bValue)) + m.Set(fds.ByNumber(genid.Any_Value_field_number), pref.ValueOfBytes(bValue)) } return nil } diff --git a/vendor/google.golang.org/protobuf/encoding/prototext/encode.go b/vendor/google.golang.org/protobuf/encoding/prototext/encode.go index 41e5c773c0a..0877d71c519 100644 --- a/vendor/google.golang.org/protobuf/encoding/prototext/encode.go +++ b/vendor/google.golang.org/protobuf/encoding/prototext/encode.go @@ -14,8 +14,8 @@ import ( "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/encoding/text" "google.golang.org/protobuf/internal/errors" - "google.golang.org/protobuf/internal/fieldnum" 
"google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/mapsort" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/internal/strs" @@ -162,7 +162,7 @@ func (e encoder) marshalMessage(m pref.Message, inclDelims bool) error { } // Handle Any expansion. - if messageDesc.FullName() == "google.protobuf.Any" { + if messageDesc.FullName() == genid.Any_message_fullname { if e.marshalAny(m) { return nil } @@ -295,13 +295,13 @@ func (e encoder) marshalMap(name string, mmap pref.Map, fd pref.FieldDescriptor) e.StartMessage() defer e.EndMessage() - e.WriteName("key") + e.WriteName(string(genid.MapEntry_Key_field_name)) err = e.marshalSingular(key.Value(), fd.MapKey()) if err != nil { return false } - e.WriteName("value") + e.WriteName(string(genid.MapEntry_Value_field_name)) err = e.marshalSingular(val, fd.MapValue()) if err != nil { return false @@ -399,7 +399,7 @@ func (e encoder) marshalUnknown(b []byte) { func (e encoder) marshalAny(any pref.Message) bool { // Construct the embedded message. fds := any.Descriptor().Fields() - fdType := fds.ByNumber(fieldnum.Any_TypeUrl) + fdType := fds.ByNumber(genid.Any_TypeUrl_field_number) typeURL := any.Get(fdType).String() mt, err := e.opts.Resolver.FindMessageByURL(typeURL) if err != nil { @@ -408,7 +408,7 @@ func (e encoder) marshalAny(any pref.Message) bool { m := mt.New().Interface() // Unmarshal bytes into embedded message. - fdValue := fds.ByNumber(fieldnum.Any_Value) + fdValue := fds.ByNumber(genid.Any_Value_field_number) value := any.Get(fdValue) err = proto.UnmarshalOptions{ AllowPartial: true, diff --git a/vendor/google.golang.org/protobuf/internal/detectknown/detect.go b/vendor/google.golang.org/protobuf/internal/detectknown/detect.go deleted file mode 100644 index f411e39e325..00000000000 --- a/vendor/google.golang.org/protobuf/internal/detectknown/detect.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package detectknown provides functionality for detecting well-known types -// and identifying them by name. 
-package detectknown - -import "google.golang.org/protobuf/reflect/protoreflect" - -type ProtoFile int - -const ( - Unknown ProtoFile = iota - AnyProto - TimestampProto - DurationProto - WrappersProto - StructProto - FieldMaskProto - ApiProto - TypeProto - SourceContextProto - EmptyProto -) - -var wellKnownTypes = map[protoreflect.FullName]ProtoFile{ - "google.protobuf.Any": AnyProto, - "google.protobuf.Timestamp": TimestampProto, - "google.protobuf.Duration": DurationProto, - "google.protobuf.BoolValue": WrappersProto, - "google.protobuf.Int32Value": WrappersProto, - "google.protobuf.Int64Value": WrappersProto, - "google.protobuf.UInt32Value": WrappersProto, - "google.protobuf.UInt64Value": WrappersProto, - "google.protobuf.FloatValue": WrappersProto, - "google.protobuf.DoubleValue": WrappersProto, - "google.protobuf.BytesValue": WrappersProto, - "google.protobuf.StringValue": WrappersProto, - "google.protobuf.Struct": StructProto, - "google.protobuf.ListValue": StructProto, - "google.protobuf.Value": StructProto, - "google.protobuf.NullValue": StructProto, - "google.protobuf.FieldMask": FieldMaskProto, - "google.protobuf.Api": ApiProto, - "google.protobuf.Method": ApiProto, - "google.protobuf.Mixin": ApiProto, - "google.protobuf.Syntax": TypeProto, - "google.protobuf.Type": TypeProto, - "google.protobuf.Field": TypeProto, - "google.protobuf.Field.Kind": TypeProto, - "google.protobuf.Field.Cardinality": TypeProto, - "google.protobuf.Enum": TypeProto, - "google.protobuf.EnumValue": TypeProto, - "google.protobuf.Option": TypeProto, - "google.protobuf.SourceContext": SourceContextProto, - "google.protobuf.Empty": EmptyProto, -} - -// Which identifies the proto file that a well-known type belongs to. -// This identifies both enums and messages. -func Which(s protoreflect.FullName) ProtoFile { - return wellKnownTypes[s] -} diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/any_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/any_gen.go deleted file mode 100644 index 74c5fef2405..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/any_gen.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.Any. -const ( - Any_TypeUrl = 1 // optional string - Any_Value = 2 // optional bytes -) diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/api_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/api_gen.go deleted file mode 100644 index 9a6b5f29b57..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/api_gen.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.Api. -const ( - Api_Name = 1 // optional string - Api_Methods = 2 // repeated google.protobuf.Method - Api_Options = 3 // repeated google.protobuf.Option - Api_Version = 4 // optional string - Api_SourceContext = 5 // optional google.protobuf.SourceContext - Api_Mixins = 6 // repeated google.protobuf.Mixin - Api_Syntax = 7 // optional google.protobuf.Syntax -) - -// Field numbers for google.protobuf.Method. 
-const ( - Method_Name = 1 // optional string - Method_RequestTypeUrl = 2 // optional string - Method_RequestStreaming = 3 // optional bool - Method_ResponseTypeUrl = 4 // optional string - Method_ResponseStreaming = 5 // optional bool - Method_Options = 6 // repeated google.protobuf.Option - Method_Syntax = 7 // optional google.protobuf.Syntax -) - -// Field numbers for google.protobuf.Mixin. -const ( - Mixin_Name = 1 // optional string - Mixin_Root = 2 // optional string -) diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/descriptor_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/descriptor_gen.go deleted file mode 100644 index 6e37b59e922..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/descriptor_gen.go +++ /dev/null @@ -1,240 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.FileDescriptorSet. -const ( - FileDescriptorSet_File = 1 // repeated google.protobuf.FileDescriptorProto -) - -// Field numbers for google.protobuf.FileDescriptorProto. -const ( - FileDescriptorProto_Name = 1 // optional string - FileDescriptorProto_Package = 2 // optional string - FileDescriptorProto_Dependency = 3 // repeated string - FileDescriptorProto_PublicDependency = 10 // repeated int32 - FileDescriptorProto_WeakDependency = 11 // repeated int32 - FileDescriptorProto_MessageType = 4 // repeated google.protobuf.DescriptorProto - FileDescriptorProto_EnumType = 5 // repeated google.protobuf.EnumDescriptorProto - FileDescriptorProto_Service = 6 // repeated google.protobuf.ServiceDescriptorProto - FileDescriptorProto_Extension = 7 // repeated google.protobuf.FieldDescriptorProto - FileDescriptorProto_Options = 8 // optional google.protobuf.FileOptions - FileDescriptorProto_SourceCodeInfo = 9 // optional google.protobuf.SourceCodeInfo - FileDescriptorProto_Syntax = 12 // optional string -) - -// Field numbers for google.protobuf.DescriptorProto. -const ( - DescriptorProto_Name = 1 // optional string - DescriptorProto_Field = 2 // repeated google.protobuf.FieldDescriptorProto - DescriptorProto_Extension = 6 // repeated google.protobuf.FieldDescriptorProto - DescriptorProto_NestedType = 3 // repeated google.protobuf.DescriptorProto - DescriptorProto_EnumType = 4 // repeated google.protobuf.EnumDescriptorProto - DescriptorProto_ExtensionRange = 5 // repeated google.protobuf.DescriptorProto.ExtensionRange - DescriptorProto_OneofDecl = 8 // repeated google.protobuf.OneofDescriptorProto - DescriptorProto_Options = 7 // optional google.protobuf.MessageOptions - DescriptorProto_ReservedRange = 9 // repeated google.protobuf.DescriptorProto.ReservedRange - DescriptorProto_ReservedName = 10 // repeated string -) - -// Field numbers for google.protobuf.DescriptorProto.ExtensionRange. -const ( - DescriptorProto_ExtensionRange_Start = 1 // optional int32 - DescriptorProto_ExtensionRange_End = 2 // optional int32 - DescriptorProto_ExtensionRange_Options = 3 // optional google.protobuf.ExtensionRangeOptions -) - -// Field numbers for google.protobuf.DescriptorProto.ReservedRange. -const ( - DescriptorProto_ReservedRange_Start = 1 // optional int32 - DescriptorProto_ReservedRange_End = 2 // optional int32 -) - -// Field numbers for google.protobuf.ExtensionRangeOptions. 
-const ( - ExtensionRangeOptions_UninterpretedOption = 999 // repeated google.protobuf.UninterpretedOption -) - -// Field numbers for google.protobuf.FieldDescriptorProto. -const ( - FieldDescriptorProto_Name = 1 // optional string - FieldDescriptorProto_Number = 3 // optional int32 - FieldDescriptorProto_Label = 4 // optional google.protobuf.FieldDescriptorProto.Label - FieldDescriptorProto_Type = 5 // optional google.protobuf.FieldDescriptorProto.Type - FieldDescriptorProto_TypeName = 6 // optional string - FieldDescriptorProto_Extendee = 2 // optional string - FieldDescriptorProto_DefaultValue = 7 // optional string - FieldDescriptorProto_OneofIndex = 9 // optional int32 - FieldDescriptorProto_JsonName = 10 // optional string - FieldDescriptorProto_Options = 8 // optional google.protobuf.FieldOptions - FieldDescriptorProto_Proto3Optional = 17 // optional bool -) - -// Field numbers for google.protobuf.OneofDescriptorProto. -const ( - OneofDescriptorProto_Name = 1 // optional string - OneofDescriptorProto_Options = 2 // optional google.protobuf.OneofOptions -) - -// Field numbers for google.protobuf.EnumDescriptorProto. -const ( - EnumDescriptorProto_Name = 1 // optional string - EnumDescriptorProto_Value = 2 // repeated google.protobuf.EnumValueDescriptorProto - EnumDescriptorProto_Options = 3 // optional google.protobuf.EnumOptions - EnumDescriptorProto_ReservedRange = 4 // repeated google.protobuf.EnumDescriptorProto.EnumReservedRange - EnumDescriptorProto_ReservedName = 5 // repeated string -) - -// Field numbers for google.protobuf.EnumDescriptorProto.EnumReservedRange. -const ( - EnumDescriptorProto_EnumReservedRange_Start = 1 // optional int32 - EnumDescriptorProto_EnumReservedRange_End = 2 // optional int32 -) - -// Field numbers for google.protobuf.EnumValueDescriptorProto. -const ( - EnumValueDescriptorProto_Name = 1 // optional string - EnumValueDescriptorProto_Number = 2 // optional int32 - EnumValueDescriptorProto_Options = 3 // optional google.protobuf.EnumValueOptions -) - -// Field numbers for google.protobuf.ServiceDescriptorProto. -const ( - ServiceDescriptorProto_Name = 1 // optional string - ServiceDescriptorProto_Method = 2 // repeated google.protobuf.MethodDescriptorProto - ServiceDescriptorProto_Options = 3 // optional google.protobuf.ServiceOptions -) - -// Field numbers for google.protobuf.MethodDescriptorProto. -const ( - MethodDescriptorProto_Name = 1 // optional string - MethodDescriptorProto_InputType = 2 // optional string - MethodDescriptorProto_OutputType = 3 // optional string - MethodDescriptorProto_Options = 4 // optional google.protobuf.MethodOptions - MethodDescriptorProto_ClientStreaming = 5 // optional bool - MethodDescriptorProto_ServerStreaming = 6 // optional bool -) - -// Field numbers for google.protobuf.FileOptions. 
-const ( - FileOptions_JavaPackage = 1 // optional string - FileOptions_JavaOuterClassname = 8 // optional string - FileOptions_JavaMultipleFiles = 10 // optional bool - FileOptions_JavaGenerateEqualsAndHash = 20 // optional bool - FileOptions_JavaStringCheckUtf8 = 27 // optional bool - FileOptions_OptimizeFor = 9 // optional google.protobuf.FileOptions.OptimizeMode - FileOptions_GoPackage = 11 // optional string - FileOptions_CcGenericServices = 16 // optional bool - FileOptions_JavaGenericServices = 17 // optional bool - FileOptions_PyGenericServices = 18 // optional bool - FileOptions_PhpGenericServices = 42 // optional bool - FileOptions_Deprecated = 23 // optional bool - FileOptions_CcEnableArenas = 31 // optional bool - FileOptions_ObjcClassPrefix = 36 // optional string - FileOptions_CsharpNamespace = 37 // optional string - FileOptions_SwiftPrefix = 39 // optional string - FileOptions_PhpClassPrefix = 40 // optional string - FileOptions_PhpNamespace = 41 // optional string - FileOptions_PhpMetadataNamespace = 44 // optional string - FileOptions_RubyPackage = 45 // optional string - FileOptions_UninterpretedOption = 999 // repeated google.protobuf.UninterpretedOption -) - -// Field numbers for google.protobuf.MessageOptions. -const ( - MessageOptions_MessageSetWireFormat = 1 // optional bool - MessageOptions_NoStandardDescriptorAccessor = 2 // optional bool - MessageOptions_Deprecated = 3 // optional bool - MessageOptions_MapEntry = 7 // optional bool - MessageOptions_UninterpretedOption = 999 // repeated google.protobuf.UninterpretedOption -) - -// Field numbers for google.protobuf.FieldOptions. -const ( - FieldOptions_Ctype = 1 // optional google.protobuf.FieldOptions.CType - FieldOptions_Packed = 2 // optional bool - FieldOptions_Jstype = 6 // optional google.protobuf.FieldOptions.JSType - FieldOptions_Lazy = 5 // optional bool - FieldOptions_Deprecated = 3 // optional bool - FieldOptions_Weak = 10 // optional bool - FieldOptions_UninterpretedOption = 999 // repeated google.protobuf.UninterpretedOption -) - -// Field numbers for google.protobuf.OneofOptions. -const ( - OneofOptions_UninterpretedOption = 999 // repeated google.protobuf.UninterpretedOption -) - -// Field numbers for google.protobuf.EnumOptions. -const ( - EnumOptions_AllowAlias = 2 // optional bool - EnumOptions_Deprecated = 3 // optional bool - EnumOptions_UninterpretedOption = 999 // repeated google.protobuf.UninterpretedOption -) - -// Field numbers for google.protobuf.EnumValueOptions. -const ( - EnumValueOptions_Deprecated = 1 // optional bool - EnumValueOptions_UninterpretedOption = 999 // repeated google.protobuf.UninterpretedOption -) - -// Field numbers for google.protobuf.ServiceOptions. -const ( - ServiceOptions_Deprecated = 33 // optional bool - ServiceOptions_UninterpretedOption = 999 // repeated google.protobuf.UninterpretedOption -) - -// Field numbers for google.protobuf.MethodOptions. -const ( - MethodOptions_Deprecated = 33 // optional bool - MethodOptions_IdempotencyLevel = 34 // optional google.protobuf.MethodOptions.IdempotencyLevel - MethodOptions_UninterpretedOption = 999 // repeated google.protobuf.UninterpretedOption -) - -// Field numbers for google.protobuf.UninterpretedOption. 
-const ( - UninterpretedOption_Name = 2 // repeated google.protobuf.UninterpretedOption.NamePart - UninterpretedOption_IdentifierValue = 3 // optional string - UninterpretedOption_PositiveIntValue = 4 // optional uint64 - UninterpretedOption_NegativeIntValue = 5 // optional int64 - UninterpretedOption_DoubleValue = 6 // optional double - UninterpretedOption_StringValue = 7 // optional bytes - UninterpretedOption_AggregateValue = 8 // optional string -) - -// Field numbers for google.protobuf.UninterpretedOption.NamePart. -const ( - UninterpretedOption_NamePart_NamePart = 1 // required string - UninterpretedOption_NamePart_IsExtension = 2 // required bool -) - -// Field numbers for google.protobuf.SourceCodeInfo. -const ( - SourceCodeInfo_Location = 1 // repeated google.protobuf.SourceCodeInfo.Location -) - -// Field numbers for google.protobuf.SourceCodeInfo.Location. -const ( - SourceCodeInfo_Location_Path = 1 // repeated int32 - SourceCodeInfo_Location_Span = 2 // repeated int32 - SourceCodeInfo_Location_LeadingComments = 3 // optional string - SourceCodeInfo_Location_TrailingComments = 4 // optional string - SourceCodeInfo_Location_LeadingDetachedComments = 6 // repeated string -) - -// Field numbers for google.protobuf.GeneratedCodeInfo. -const ( - GeneratedCodeInfo_Annotation = 1 // repeated google.protobuf.GeneratedCodeInfo.Annotation -) - -// Field numbers for google.protobuf.GeneratedCodeInfo.Annotation. -const ( - GeneratedCodeInfo_Annotation_Path = 1 // repeated int32 - GeneratedCodeInfo_Annotation_SourceFile = 2 // optional string - GeneratedCodeInfo_Annotation_Begin = 3 // optional int32 - GeneratedCodeInfo_Annotation_End = 4 // optional int32 -) diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/doc.go b/vendor/google.golang.org/protobuf/internal/fieldnum/doc.go deleted file mode 100644 index e5978859980..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/doc.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package fieldnum contains constants for field numbers of fields in messages -// declared in descriptor.proto and any of the well-known types. -package fieldnum diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/duration_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/duration_gen.go deleted file mode 100644 index 8816c7358da..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/duration_gen.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.Duration. -const ( - Duration_Seconds = 1 // optional int64 - Duration_Nanos = 2 // optional int32 -) diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/empty_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/empty_gen.go deleted file mode 100644 index b5130a6dd14..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/empty_gen.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. 
- -package fieldnum - -// Field numbers for google.protobuf.Empty. -const () diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/field_mask_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/field_mask_gen.go deleted file mode 100644 index 7e3bfa27bbc..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/field_mask_gen.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.FieldMask. -const ( - FieldMask_Paths = 1 // repeated string -) diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/source_context_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/source_context_gen.go deleted file mode 100644 index 241972b1f78..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/source_context_gen.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.SourceContext. -const ( - SourceContext_FileName = 1 // optional string -) diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/struct_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/struct_gen.go deleted file mode 100644 index c460aab44a7..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/struct_gen.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.Struct. -const ( - Struct_Fields = 1 // repeated google.protobuf.Struct.FieldsEntry -) - -// Field numbers for google.protobuf.Struct.FieldsEntry. -const ( - Struct_FieldsEntry_Key = 1 // optional string - Struct_FieldsEntry_Value = 2 // optional google.protobuf.Value -) - -// Field numbers for google.protobuf.Value. -const ( - Value_NullValue = 1 // optional google.protobuf.NullValue - Value_NumberValue = 2 // optional double - Value_StringValue = 3 // optional string - Value_BoolValue = 4 // optional bool - Value_StructValue = 5 // optional google.protobuf.Struct - Value_ListValue = 6 // optional google.protobuf.ListValue -) - -// Field numbers for google.protobuf.ListValue. -const ( - ListValue_Values = 1 // repeated google.protobuf.Value -) diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/timestamp_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/timestamp_gen.go deleted file mode 100644 index b4346fba547..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/timestamp_gen.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.Timestamp. 
-const ( - Timestamp_Seconds = 1 // optional int64 - Timestamp_Nanos = 2 // optional int32 -) diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/type_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/type_gen.go deleted file mode 100644 index b392e95981a..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/type_gen.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.Type. -const ( - Type_Name = 1 // optional string - Type_Fields = 2 // repeated google.protobuf.Field - Type_Oneofs = 3 // repeated string - Type_Options = 4 // repeated google.protobuf.Option - Type_SourceContext = 5 // optional google.protobuf.SourceContext - Type_Syntax = 6 // optional google.protobuf.Syntax -) - -// Field numbers for google.protobuf.Field. -const ( - Field_Kind = 1 // optional google.protobuf.Field.Kind - Field_Cardinality = 2 // optional google.protobuf.Field.Cardinality - Field_Number = 3 // optional int32 - Field_Name = 4 // optional string - Field_TypeUrl = 6 // optional string - Field_OneofIndex = 7 // optional int32 - Field_Packed = 8 // optional bool - Field_Options = 9 // repeated google.protobuf.Option - Field_JsonName = 10 // optional string - Field_DefaultValue = 11 // optional string -) - -// Field numbers for google.protobuf.Enum. -const ( - Enum_Name = 1 // optional string - Enum_Enumvalue = 2 // repeated google.protobuf.EnumValue - Enum_Options = 3 // repeated google.protobuf.Option - Enum_SourceContext = 4 // optional google.protobuf.SourceContext - Enum_Syntax = 5 // optional google.protobuf.Syntax -) - -// Field numbers for google.protobuf.EnumValue. -const ( - EnumValue_Name = 1 // optional string - EnumValue_Number = 2 // optional int32 - EnumValue_Options = 3 // repeated google.protobuf.Option -) - -// Field numbers for google.protobuf.Option. -const ( - Option_Name = 1 // optional string - Option_Value = 2 // optional google.protobuf.Any -) diff --git a/vendor/google.golang.org/protobuf/internal/fieldnum/wrappers_gen.go b/vendor/google.golang.org/protobuf/internal/fieldnum/wrappers_gen.go deleted file mode 100644 index 42f846a9f71..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldnum/wrappers_gen.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by generate-protos. DO NOT EDIT. - -package fieldnum - -// Field numbers for google.protobuf.DoubleValue. -const ( - DoubleValue_Value = 1 // optional double -) - -// Field numbers for google.protobuf.FloatValue. -const ( - FloatValue_Value = 1 // optional float -) - -// Field numbers for google.protobuf.Int64Value. -const ( - Int64Value_Value = 1 // optional int64 -) - -// Field numbers for google.protobuf.UInt64Value. -const ( - UInt64Value_Value = 1 // optional uint64 -) - -// Field numbers for google.protobuf.Int32Value. -const ( - Int32Value_Value = 1 // optional int32 -) - -// Field numbers for google.protobuf.UInt32Value. -const ( - UInt32Value_Value = 1 // optional uint32 -) - -// Field numbers for google.protobuf.BoolValue. -const ( - BoolValue_Value = 1 // optional bool -) - -// Field numbers for google.protobuf.StringValue. 
-const ( - StringValue_Value = 1 // optional string -) - -// Field numbers for google.protobuf.BytesValue. -const ( - BytesValue_Value = 1 // optional bytes -) diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/build.go b/vendor/google.golang.org/protobuf/internal/filedesc/build.go index 462d384e9f7..d02d770c984 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/build.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/build.go @@ -7,7 +7,7 @@ package filedesc import ( "google.golang.org/protobuf/encoding/protowire" - "google.golang.org/protobuf/internal/fieldnum" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/reflect/protoreflect" pref "google.golang.org/protobuf/reflect/protoreflect" preg "google.golang.org/protobuf/reflect/protoregistry" @@ -126,24 +126,24 @@ func (db *Builder) unmarshalCounts(b []byte, isFile bool) { b = b[m:] if isFile { switch num { - case fieldnum.FileDescriptorProto_EnumType: + case genid.FileDescriptorProto_EnumType_field_number: db.NumEnums++ - case fieldnum.FileDescriptorProto_MessageType: + case genid.FileDescriptorProto_MessageType_field_number: db.unmarshalCounts(v, false) db.NumMessages++ - case fieldnum.FileDescriptorProto_Extension: + case genid.FileDescriptorProto_Extension_field_number: db.NumExtensions++ - case fieldnum.FileDescriptorProto_Service: + case genid.FileDescriptorProto_Service_field_number: db.NumServices++ } } else { switch num { - case fieldnum.DescriptorProto_EnumType: + case genid.DescriptorProto_EnumType_field_number: db.NumEnums++ - case fieldnum.DescriptorProto_NestedType: + case genid.DescriptorProto_NestedType_field_number: db.unmarshalCounts(v, false) db.NumMessages++ - case fieldnum.DescriptorProto_Extension: + case genid.DescriptorProto_Extension_field_number: db.NumExtensions++ } } diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc.go index 2540befd645..9385126fba6 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/desc.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc.go @@ -13,6 +13,7 @@ import ( "google.golang.org/protobuf/internal/descfmt" "google.golang.org/protobuf/internal/descopts" "google.golang.org/protobuf/internal/encoding/defval" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/internal/strs" pref "google.golang.org/protobuf/reflect/protoreflect" @@ -302,13 +303,13 @@ func (fd *Field) MapKey() pref.FieldDescriptor { if !fd.IsMap() { return nil } - return fd.Message().Fields().ByNumber(1) + return fd.Message().Fields().ByNumber(genid.MapEntry_Key_field_number) } func (fd *Field) MapValue() pref.FieldDescriptor { if !fd.IsMap() { return nil } - return fd.Message().Fields().ByNumber(2) + return fd.Message().Fields().ByNumber(genid.MapEntry_Value_field_number) } func (fd *Field) HasDefault() bool { return fd.L1.Default.has } func (fd *Field) Default() pref.Value { return fd.L1.Default.get(fd) } diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_init.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_init.go index c0cddf86a46..66e1fee5224 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/desc_init.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_init.go @@ -8,7 +8,7 @@ import ( "sync" "google.golang.org/protobuf/encoding/protowire" - "google.golang.org/protobuf/internal/fieldnum" + 
"google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/strs" pref "google.golang.org/protobuf/reflect/protoreflect" ) @@ -107,7 +107,7 @@ func (fd *File) unmarshalSeed(b []byte) { v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.FileDescriptorProto_Syntax: + case genid.FileDescriptorProto_Syntax_field_number: switch string(v) { case "proto2": fd.L1.Syntax = pref.Proto2 @@ -116,36 +116,36 @@ func (fd *File) unmarshalSeed(b []byte) { default: panic("invalid syntax") } - case fieldnum.FileDescriptorProto_Name: + case genid.FileDescriptorProto_Name_field_number: fd.L1.Path = sb.MakeString(v) - case fieldnum.FileDescriptorProto_Package: + case genid.FileDescriptorProto_Package_field_number: fd.L1.Package = pref.FullName(sb.MakeString(v)) - case fieldnum.FileDescriptorProto_EnumType: - if prevField != fieldnum.FileDescriptorProto_EnumType { + case genid.FileDescriptorProto_EnumType_field_number: + if prevField != genid.FileDescriptorProto_EnumType_field_number { if numEnums > 0 { panic("non-contiguous repeated field") } posEnums = len(b0) - len(b) - n - m } numEnums++ - case fieldnum.FileDescriptorProto_MessageType: - if prevField != fieldnum.FileDescriptorProto_MessageType { + case genid.FileDescriptorProto_MessageType_field_number: + if prevField != genid.FileDescriptorProto_MessageType_field_number { if numMessages > 0 { panic("non-contiguous repeated field") } posMessages = len(b0) - len(b) - n - m } numMessages++ - case fieldnum.FileDescriptorProto_Extension: - if prevField != fieldnum.FileDescriptorProto_Extension { + case genid.FileDescriptorProto_Extension_field_number: + if prevField != genid.FileDescriptorProto_Extension_field_number { if numExtensions > 0 { panic("non-contiguous repeated field") } posExtensions = len(b0) - len(b) - n - m } numExtensions++ - case fieldnum.FileDescriptorProto_Service: - if prevField != fieldnum.FileDescriptorProto_Service { + case genid.FileDescriptorProto_Service_field_number: + if prevField != genid.FileDescriptorProto_Service_field_number { if numServices > 0 { panic("non-contiguous repeated field") } @@ -233,9 +233,9 @@ func (ed *Enum) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd pref.Desc v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.EnumDescriptorProto_Name: + case genid.EnumDescriptorProto_Name_field_number: ed.L0.FullName = appendFullName(sb, pd.FullName(), v) - case fieldnum.EnumDescriptorProto_Value: + case genid.EnumDescriptorProto_Value_field_number: numValues++ } default: @@ -260,7 +260,7 @@ func (ed *Enum) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd pref.Desc v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.EnumDescriptorProto_Value: + case genid.EnumDescriptorProto_Value_field_number: ed.L2.Values.List[i].unmarshalFull(v, sb, pf, ed, i) i++ } @@ -288,33 +288,33 @@ func (md *Message) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd pref.D v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.DescriptorProto_Name: + case genid.DescriptorProto_Name_field_number: md.L0.FullName = appendFullName(sb, pd.FullName(), v) - case fieldnum.DescriptorProto_EnumType: - if prevField != fieldnum.DescriptorProto_EnumType { + case genid.DescriptorProto_EnumType_field_number: + if prevField != genid.DescriptorProto_EnumType_field_number { if numEnums > 0 { panic("non-contiguous repeated field") } posEnums = len(b0) - len(b) - n - m } numEnums++ - case fieldnum.DescriptorProto_NestedType: - if prevField != 
fieldnum.DescriptorProto_NestedType { + case genid.DescriptorProto_NestedType_field_number: + if prevField != genid.DescriptorProto_NestedType_field_number { if numMessages > 0 { panic("non-contiguous repeated field") } posMessages = len(b0) - len(b) - n - m } numMessages++ - case fieldnum.DescriptorProto_Extension: - if prevField != fieldnum.DescriptorProto_Extension { + case genid.DescriptorProto_Extension_field_number: + if prevField != genid.DescriptorProto_Extension_field_number { if numExtensions > 0 { panic("non-contiguous repeated field") } posExtensions = len(b0) - len(b) - n - m } numExtensions++ - case fieldnum.DescriptorProto_Options: + case genid.DescriptorProto_Options_field_number: md.unmarshalSeedOptions(v) } prevField = num @@ -375,9 +375,9 @@ func (md *Message) unmarshalSeedOptions(b []byte) { v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.MessageOptions_MapEntry: + case genid.MessageOptions_MapEntry_field_number: md.L1.IsMapEntry = protowire.DecodeBool(v) - case fieldnum.MessageOptions_MessageSetWireFormat: + case genid.MessageOptions_MessageSetWireFormat_field_number: md.L1.IsMessageSet = protowire.DecodeBool(v) } default: @@ -400,20 +400,20 @@ func (xd *Extension) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd pref v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.FieldDescriptorProto_Number: + case genid.FieldDescriptorProto_Number_field_number: xd.L1.Number = pref.FieldNumber(v) - case fieldnum.FieldDescriptorProto_Label: + case genid.FieldDescriptorProto_Label_field_number: xd.L1.Cardinality = pref.Cardinality(v) - case fieldnum.FieldDescriptorProto_Type: + case genid.FieldDescriptorProto_Type_field_number: xd.L1.Kind = pref.Kind(v) } case protowire.BytesType: v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.FieldDescriptorProto_Name: + case genid.FieldDescriptorProto_Name_field_number: xd.L0.FullName = appendFullName(sb, pd.FullName(), v) - case fieldnum.FieldDescriptorProto_Extendee: + case genid.FieldDescriptorProto_Extendee_field_number: xd.L1.Extendee = PlaceholderMessage(makeFullName(sb, v)) } default: @@ -436,7 +436,7 @@ func (sd *Service) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd pref.D v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.ServiceDescriptorProto_Name: + case genid.ServiceDescriptorProto_Name_field_number: sd.L0.FullName = appendFullName(sb, pd.FullName(), v) } default: diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go index bc215944a32..e672233e77e 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go @@ -10,7 +10,7 @@ import ( "google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/internal/descopts" - "google.golang.org/protobuf/internal/fieldnum" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/strs" "google.golang.org/protobuf/proto" pref "google.golang.org/protobuf/reflect/protoreflect" @@ -143,35 +143,35 @@ func (fd *File) unmarshalFull(b []byte) { v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.FileDescriptorProto_PublicDependency: + case genid.FileDescriptorProto_PublicDependency_field_number: fd.L2.Imports[v].IsPublic = true - case fieldnum.FileDescriptorProto_WeakDependency: + case genid.FileDescriptorProto_WeakDependency_field_number: 
fd.L2.Imports[v].IsWeak = true } case protowire.BytesType: v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.FileDescriptorProto_Dependency: + case genid.FileDescriptorProto_Dependency_field_number: path := sb.MakeString(v) imp, _ := fd.builder.FileRegistry.FindFileByPath(path) if imp == nil { imp = PlaceholderFile(path) } fd.L2.Imports = append(fd.L2.Imports, pref.FileImport{FileDescriptor: imp}) - case fieldnum.FileDescriptorProto_EnumType: + case genid.FileDescriptorProto_EnumType_field_number: fd.L1.Enums.List[enumIdx].unmarshalFull(v, sb) enumIdx++ - case fieldnum.FileDescriptorProto_MessageType: + case genid.FileDescriptorProto_MessageType_field_number: fd.L1.Messages.List[messageIdx].unmarshalFull(v, sb) messageIdx++ - case fieldnum.FileDescriptorProto_Extension: + case genid.FileDescriptorProto_Extension_field_number: fd.L1.Extensions.List[extensionIdx].unmarshalFull(v, sb) extensionIdx++ - case fieldnum.FileDescriptorProto_Service: + case genid.FileDescriptorProto_Service_field_number: fd.L1.Services.List[serviceIdx].unmarshalFull(v, sb) serviceIdx++ - case fieldnum.FileDescriptorProto_Options: + case genid.FileDescriptorProto_Options_field_number: rawOptions = appendOptions(rawOptions, v) } default: @@ -196,13 +196,13 @@ func (ed *Enum) unmarshalFull(b []byte, sb *strs.Builder) { v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.EnumDescriptorProto_Value: + case genid.EnumDescriptorProto_Value_field_number: rawValues = append(rawValues, v) - case fieldnum.EnumDescriptorProto_ReservedName: + case genid.EnumDescriptorProto_ReservedName_field_number: ed.L2.ReservedNames.List = append(ed.L2.ReservedNames.List, pref.Name(sb.MakeString(v))) - case fieldnum.EnumDescriptorProto_ReservedRange: + case genid.EnumDescriptorProto_ReservedRange_field_number: ed.L2.ReservedRanges.List = append(ed.L2.ReservedRanges.List, unmarshalEnumReservedRange(v)) - case fieldnum.EnumDescriptorProto_Options: + case genid.EnumDescriptorProto_Options_field_number: rawOptions = appendOptions(rawOptions, v) } default: @@ -228,9 +228,9 @@ func unmarshalEnumReservedRange(b []byte) (r [2]pref.EnumNumber) { v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.EnumDescriptorProto_EnumReservedRange_Start: + case genid.EnumDescriptorProto_EnumReservedRange_Start_field_number: r[0] = pref.EnumNumber(v) - case fieldnum.EnumDescriptorProto_EnumReservedRange_End: + case genid.EnumDescriptorProto_EnumReservedRange_End_field_number: r[1] = pref.EnumNumber(v) } default: @@ -255,17 +255,17 @@ func (vd *EnumValue) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.EnumValueDescriptorProto_Number: + case genid.EnumValueDescriptorProto_Number_field_number: vd.L1.Number = pref.EnumNumber(v) } case protowire.BytesType: v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.EnumValueDescriptorProto_Name: + case genid.EnumValueDescriptorProto_Name_field_number: // NOTE: Enum values are in the same scope as the enum parent. 
vd.L0.FullName = appendFullName(sb, pd.Parent().FullName(), v) - case fieldnum.EnumValueDescriptorProto_Options: + case genid.EnumValueDescriptorProto_Options_field_number: rawOptions = appendOptions(rawOptions, v) } default: @@ -289,29 +289,29 @@ func (md *Message) unmarshalFull(b []byte, sb *strs.Builder) { v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.DescriptorProto_Field: + case genid.DescriptorProto_Field_field_number: rawFields = append(rawFields, v) - case fieldnum.DescriptorProto_OneofDecl: + case genid.DescriptorProto_OneofDecl_field_number: rawOneofs = append(rawOneofs, v) - case fieldnum.DescriptorProto_ReservedName: + case genid.DescriptorProto_ReservedName_field_number: md.L2.ReservedNames.List = append(md.L2.ReservedNames.List, pref.Name(sb.MakeString(v))) - case fieldnum.DescriptorProto_ReservedRange: + case genid.DescriptorProto_ReservedRange_field_number: md.L2.ReservedRanges.List = append(md.L2.ReservedRanges.List, unmarshalMessageReservedRange(v)) - case fieldnum.DescriptorProto_ExtensionRange: + case genid.DescriptorProto_ExtensionRange_field_number: r, rawOptions := unmarshalMessageExtensionRange(v) opts := md.L0.ParentFile.builder.optionsUnmarshaler(&descopts.ExtensionRange, rawOptions) md.L2.ExtensionRanges.List = append(md.L2.ExtensionRanges.List, r) md.L2.ExtensionRangeOptions = append(md.L2.ExtensionRangeOptions, opts) - case fieldnum.DescriptorProto_EnumType: + case genid.DescriptorProto_EnumType_field_number: md.L1.Enums.List[enumIdx].unmarshalFull(v, sb) enumIdx++ - case fieldnum.DescriptorProto_NestedType: + case genid.DescriptorProto_NestedType_field_number: md.L1.Messages.List[messageIdx].unmarshalFull(v, sb) messageIdx++ - case fieldnum.DescriptorProto_Extension: + case genid.DescriptorProto_Extension_field_number: md.L1.Extensions.List[extensionIdx].unmarshalFull(v, sb) extensionIdx++ - case fieldnum.DescriptorProto_Options: + case genid.DescriptorProto_Options_field_number: md.unmarshalOptions(v) rawOptions = appendOptions(rawOptions, v) } @@ -347,9 +347,9 @@ func (md *Message) unmarshalOptions(b []byte) { v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.MessageOptions_MapEntry: + case genid.MessageOptions_MapEntry_field_number: md.L1.IsMapEntry = protowire.DecodeBool(v) - case fieldnum.MessageOptions_MessageSetWireFormat: + case genid.MessageOptions_MessageSetWireFormat_field_number: md.L1.IsMessageSet = protowire.DecodeBool(v) } default: @@ -368,9 +368,9 @@ func unmarshalMessageReservedRange(b []byte) (r [2]pref.FieldNumber) { v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.DescriptorProto_ReservedRange_Start: + case genid.DescriptorProto_ReservedRange_Start_field_number: r[0] = pref.FieldNumber(v) - case fieldnum.DescriptorProto_ReservedRange_End: + case genid.DescriptorProto_ReservedRange_End_field_number: r[1] = pref.FieldNumber(v) } default: @@ -390,16 +390,16 @@ func unmarshalMessageExtensionRange(b []byte) (r [2]pref.FieldNumber, rawOptions v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.DescriptorProto_ExtensionRange_Start: + case genid.DescriptorProto_ExtensionRange_Start_field_number: r[0] = pref.FieldNumber(v) - case fieldnum.DescriptorProto_ExtensionRange_End: + case genid.DescriptorProto_ExtensionRange_End_field_number: r[1] = pref.FieldNumber(v) } case protowire.BytesType: v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.DescriptorProto_ExtensionRange_Options: + case 
genid.DescriptorProto_ExtensionRange_Options_field_number: rawOptions = appendOptions(rawOptions, v) } default: @@ -425,13 +425,13 @@ func (fd *Field) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref.Des v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.FieldDescriptorProto_Number: + case genid.FieldDescriptorProto_Number_field_number: fd.L1.Number = pref.FieldNumber(v) - case fieldnum.FieldDescriptorProto_Label: + case genid.FieldDescriptorProto_Label_field_number: fd.L1.Cardinality = pref.Cardinality(v) - case fieldnum.FieldDescriptorProto_Type: + case genid.FieldDescriptorProto_Type_field_number: fd.L1.Kind = pref.Kind(v) - case fieldnum.FieldDescriptorProto_OneofIndex: + case genid.FieldDescriptorProto_OneofIndex_field_number: // In Message.unmarshalFull, we allocate slices for both // the field and oneof descriptors before unmarshaling either // of them. This ensures pointers to slice elements are stable. @@ -441,22 +441,22 @@ func (fd *Field) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref.Des panic("oneof type already set") } fd.L1.ContainingOneof = od - case fieldnum.FieldDescriptorProto_Proto3Optional: + case genid.FieldDescriptorProto_Proto3Optional_field_number: fd.L1.IsProto3Optional = protowire.DecodeBool(v) } case protowire.BytesType: v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.FieldDescriptorProto_Name: + case genid.FieldDescriptorProto_Name_field_number: fd.L0.FullName = appendFullName(sb, pd.FullName(), v) - case fieldnum.FieldDescriptorProto_JsonName: + case genid.FieldDescriptorProto_JsonName_field_number: fd.L1.JSONName.Init(sb.MakeString(v)) - case fieldnum.FieldDescriptorProto_DefaultValue: + case genid.FieldDescriptorProto_DefaultValue_field_number: fd.L1.Default.val = pref.ValueOfBytes(v) // temporarily store as bytes; later resolved in resolveMessages - case fieldnum.FieldDescriptorProto_TypeName: + case genid.FieldDescriptorProto_TypeName_field_number: rawTypeName = v - case fieldnum.FieldDescriptorProto_Options: + case genid.FieldDescriptorProto_Options_field_number: fd.unmarshalOptions(v) rawOptions = appendOptions(rawOptions, v) } @@ -488,10 +488,10 @@ func (fd *Field) unmarshalOptions(b []byte) { v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.FieldOptions_Packed: + case genid.FieldOptions_Packed_field_number: fd.L1.HasPacked = true fd.L1.IsPacked = protowire.DecodeBool(v) - case fieldnum.FieldOptions_Weak: + case genid.FieldOptions_Weak_field_number: fd.L1.IsWeak = protowire.DecodeBool(v) case FieldOptions_EnforceUTF8: fd.L1.HasEnforceUTF8 = true @@ -518,9 +518,9 @@ func (od *Oneof) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref.Des v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.OneofDescriptorProto_Name: + case genid.OneofDescriptorProto_Name_field_number: od.L0.FullName = appendFullName(sb, pd.FullName(), v) - case fieldnum.OneofDescriptorProto_Options: + case genid.OneofDescriptorProto_Options_field_number: rawOptions = appendOptions(rawOptions, v) } default: @@ -543,20 +543,20 @@ func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) { v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.FieldDescriptorProto_Proto3Optional: + case genid.FieldDescriptorProto_Proto3Optional_field_number: xd.L2.IsProto3Optional = protowire.DecodeBool(v) } case protowire.BytesType: v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.FieldDescriptorProto_JsonName: + case 
genid.FieldDescriptorProto_JsonName_field_number: xd.L2.JSONName.Init(sb.MakeString(v)) - case fieldnum.FieldDescriptorProto_DefaultValue: + case genid.FieldDescriptorProto_DefaultValue_field_number: xd.L2.Default.val = pref.ValueOfBytes(v) // temporarily store as bytes; later resolved in resolveExtensions - case fieldnum.FieldDescriptorProto_TypeName: + case genid.FieldDescriptorProto_TypeName_field_number: rawTypeName = v - case fieldnum.FieldDescriptorProto_Options: + case genid.FieldDescriptorProto_Options_field_number: xd.unmarshalOptions(v) rawOptions = appendOptions(rawOptions, v) } @@ -586,7 +586,7 @@ func (xd *Extension) unmarshalOptions(b []byte) { v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.FieldOptions_Packed: + case genid.FieldOptions_Packed_field_number: xd.L2.IsPacked = protowire.DecodeBool(v) } default: @@ -608,9 +608,9 @@ func (sd *Service) unmarshalFull(b []byte, sb *strs.Builder) { v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.ServiceDescriptorProto_Method: + case genid.ServiceDescriptorProto_Method_field_number: rawMethods = append(rawMethods, v) - case fieldnum.ServiceDescriptorProto_Options: + case genid.ServiceDescriptorProto_Options_field_number: rawOptions = appendOptions(rawOptions, v) } default: @@ -641,22 +641,22 @@ func (md *Method) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref.De v, m := protowire.ConsumeVarint(b) b = b[m:] switch num { - case fieldnum.MethodDescriptorProto_ClientStreaming: + case genid.MethodDescriptorProto_ClientStreaming_field_number: md.L1.IsStreamingClient = protowire.DecodeBool(v) - case fieldnum.MethodDescriptorProto_ServerStreaming: + case genid.MethodDescriptorProto_ServerStreaming_field_number: md.L1.IsStreamingServer = protowire.DecodeBool(v) } case protowire.BytesType: v, m := protowire.ConsumeBytes(b) b = b[m:] switch num { - case fieldnum.MethodDescriptorProto_Name: + case genid.MethodDescriptorProto_Name_field_number: md.L0.FullName = appendFullName(sb, pd.FullName(), v) - case fieldnum.MethodDescriptorProto_InputType: + case genid.MethodDescriptorProto_InputType_field_number: md.L1.Input = PlaceholderMessage(makeFullName(sb, v)) - case fieldnum.MethodDescriptorProto_OutputType: + case genid.MethodDescriptorProto_OutputType_field_number: md.L1.Output = PlaceholderMessage(makeFullName(sb, v)) - case fieldnum.MethodDescriptorProto_Options: + case genid.MethodDescriptorProto_Options_field_number: rawOptions = appendOptions(rawOptions, v) } default: diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go index 1b7089b6434..c876cd34d70 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go @@ -6,7 +6,6 @@ package filedesc import ( "fmt" - "math" "sort" "sync" @@ -185,10 +184,7 @@ func (p *FieldRanges) CheckValid(isMessageSet bool) error { // Unlike the FieldNumber.IsValid method, it allows ranges that cover the // reserved number range. func isValidFieldNumber(n protoreflect.FieldNumber, isMessageSet bool) bool { - if isMessageSet { - return protowire.MinValidNumber <= n && n <= math.MaxInt32 - } - return protowire.MinValidNumber <= n && n <= protowire.MaxValidNumber + return protowire.MinValidNumber <= n && (n <= protowire.MaxValidNumber || isMessageSet) } // CheckOverlap reports an error if p and q overlap. 
diff --git a/vendor/google.golang.org/protobuf/internal/genid/any_gen.go b/vendor/google.golang.org/protobuf/internal/genid/any_gen.go new file mode 100644 index 00000000000..e6f7d47ab6d --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/any_gen.go @@ -0,0 +1,34 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_any_proto = "google/protobuf/any.proto" + +// Names for google.protobuf.Any. +const ( + Any_message_name protoreflect.Name = "Any" + Any_message_fullname protoreflect.FullName = "google.protobuf.Any" +) + +// Field names for google.protobuf.Any. +const ( + Any_TypeUrl_field_name protoreflect.Name = "type_url" + Any_Value_field_name protoreflect.Name = "value" + + Any_TypeUrl_field_fullname protoreflect.FullName = "google.protobuf.Any.type_url" + Any_Value_field_fullname protoreflect.FullName = "google.protobuf.Any.value" +) + +// Field numbers for google.protobuf.Any. +const ( + Any_TypeUrl_field_number protoreflect.FieldNumber = 1 + Any_Value_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/api_gen.go b/vendor/google.golang.org/protobuf/internal/genid/api_gen.go new file mode 100644 index 00000000000..df8f9185013 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/api_gen.go @@ -0,0 +1,106 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_api_proto = "google/protobuf/api.proto" + +// Names for google.protobuf.Api. +const ( + Api_message_name protoreflect.Name = "Api" + Api_message_fullname protoreflect.FullName = "google.protobuf.Api" +) + +// Field names for google.protobuf.Api. +const ( + Api_Name_field_name protoreflect.Name = "name" + Api_Methods_field_name protoreflect.Name = "methods" + Api_Options_field_name protoreflect.Name = "options" + Api_Version_field_name protoreflect.Name = "version" + Api_SourceContext_field_name protoreflect.Name = "source_context" + Api_Mixins_field_name protoreflect.Name = "mixins" + Api_Syntax_field_name protoreflect.Name = "syntax" + + Api_Name_field_fullname protoreflect.FullName = "google.protobuf.Api.name" + Api_Methods_field_fullname protoreflect.FullName = "google.protobuf.Api.methods" + Api_Options_field_fullname protoreflect.FullName = "google.protobuf.Api.options" + Api_Version_field_fullname protoreflect.FullName = "google.protobuf.Api.version" + Api_SourceContext_field_fullname protoreflect.FullName = "google.protobuf.Api.source_context" + Api_Mixins_field_fullname protoreflect.FullName = "google.protobuf.Api.mixins" + Api_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Api.syntax" +) + +// Field numbers for google.protobuf.Api. 
+const ( + Api_Name_field_number protoreflect.FieldNumber = 1 + Api_Methods_field_number protoreflect.FieldNumber = 2 + Api_Options_field_number protoreflect.FieldNumber = 3 + Api_Version_field_number protoreflect.FieldNumber = 4 + Api_SourceContext_field_number protoreflect.FieldNumber = 5 + Api_Mixins_field_number protoreflect.FieldNumber = 6 + Api_Syntax_field_number protoreflect.FieldNumber = 7 +) + +// Names for google.protobuf.Method. +const ( + Method_message_name protoreflect.Name = "Method" + Method_message_fullname protoreflect.FullName = "google.protobuf.Method" +) + +// Field names for google.protobuf.Method. +const ( + Method_Name_field_name protoreflect.Name = "name" + Method_RequestTypeUrl_field_name protoreflect.Name = "request_type_url" + Method_RequestStreaming_field_name protoreflect.Name = "request_streaming" + Method_ResponseTypeUrl_field_name protoreflect.Name = "response_type_url" + Method_ResponseStreaming_field_name protoreflect.Name = "response_streaming" + Method_Options_field_name protoreflect.Name = "options" + Method_Syntax_field_name protoreflect.Name = "syntax" + + Method_Name_field_fullname protoreflect.FullName = "google.protobuf.Method.name" + Method_RequestTypeUrl_field_fullname protoreflect.FullName = "google.protobuf.Method.request_type_url" + Method_RequestStreaming_field_fullname protoreflect.FullName = "google.protobuf.Method.request_streaming" + Method_ResponseTypeUrl_field_fullname protoreflect.FullName = "google.protobuf.Method.response_type_url" + Method_ResponseStreaming_field_fullname protoreflect.FullName = "google.protobuf.Method.response_streaming" + Method_Options_field_fullname protoreflect.FullName = "google.protobuf.Method.options" + Method_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Method.syntax" +) + +// Field numbers for google.protobuf.Method. +const ( + Method_Name_field_number protoreflect.FieldNumber = 1 + Method_RequestTypeUrl_field_number protoreflect.FieldNumber = 2 + Method_RequestStreaming_field_number protoreflect.FieldNumber = 3 + Method_ResponseTypeUrl_field_number protoreflect.FieldNumber = 4 + Method_ResponseStreaming_field_number protoreflect.FieldNumber = 5 + Method_Options_field_number protoreflect.FieldNumber = 6 + Method_Syntax_field_number protoreflect.FieldNumber = 7 +) + +// Names for google.protobuf.Mixin. +const ( + Mixin_message_name protoreflect.Name = "Mixin" + Mixin_message_fullname protoreflect.FullName = "google.protobuf.Mixin" +) + +// Field names for google.protobuf.Mixin. +const ( + Mixin_Name_field_name protoreflect.Name = "name" + Mixin_Root_field_name protoreflect.Name = "root" + + Mixin_Name_field_fullname protoreflect.FullName = "google.protobuf.Mixin.name" + Mixin_Root_field_fullname protoreflect.FullName = "google.protobuf.Mixin.root" +) + +// Field numbers for google.protobuf.Mixin. +const ( + Mixin_Name_field_number protoreflect.FieldNumber = 1 + Mixin_Root_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go b/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go new file mode 100644 index 00000000000..e3cdf1c2059 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go @@ -0,0 +1,829 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. 
+ +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_descriptor_proto = "google/protobuf/descriptor.proto" + +// Names for google.protobuf.FileDescriptorSet. +const ( + FileDescriptorSet_message_name protoreflect.Name = "FileDescriptorSet" + FileDescriptorSet_message_fullname protoreflect.FullName = "google.protobuf.FileDescriptorSet" +) + +// Field names for google.protobuf.FileDescriptorSet. +const ( + FileDescriptorSet_File_field_name protoreflect.Name = "file" + + FileDescriptorSet_File_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorSet.file" +) + +// Field numbers for google.protobuf.FileDescriptorSet. +const ( + FileDescriptorSet_File_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.FileDescriptorProto. +const ( + FileDescriptorProto_message_name protoreflect.Name = "FileDescriptorProto" + FileDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto" +) + +// Field names for google.protobuf.FileDescriptorProto. +const ( + FileDescriptorProto_Name_field_name protoreflect.Name = "name" + FileDescriptorProto_Package_field_name protoreflect.Name = "package" + FileDescriptorProto_Dependency_field_name protoreflect.Name = "dependency" + FileDescriptorProto_PublicDependency_field_name protoreflect.Name = "public_dependency" + FileDescriptorProto_WeakDependency_field_name protoreflect.Name = "weak_dependency" + FileDescriptorProto_MessageType_field_name protoreflect.Name = "message_type" + FileDescriptorProto_EnumType_field_name protoreflect.Name = "enum_type" + FileDescriptorProto_Service_field_name protoreflect.Name = "service" + FileDescriptorProto_Extension_field_name protoreflect.Name = "extension" + FileDescriptorProto_Options_field_name protoreflect.Name = "options" + FileDescriptorProto_SourceCodeInfo_field_name protoreflect.Name = "source_code_info" + FileDescriptorProto_Syntax_field_name protoreflect.Name = "syntax" + + FileDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.name" + FileDescriptorProto_Package_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.package" + FileDescriptorProto_Dependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.dependency" + FileDescriptorProto_PublicDependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.public_dependency" + FileDescriptorProto_WeakDependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.weak_dependency" + FileDescriptorProto_MessageType_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.message_type" + FileDescriptorProto_EnumType_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.enum_type" + FileDescriptorProto_Service_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.service" + FileDescriptorProto_Extension_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.extension" + FileDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.options" + FileDescriptorProto_SourceCodeInfo_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.source_code_info" + FileDescriptorProto_Syntax_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.syntax" +) + +// Field numbers for google.protobuf.FileDescriptorProto. 
+const ( + FileDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + FileDescriptorProto_Package_field_number protoreflect.FieldNumber = 2 + FileDescriptorProto_Dependency_field_number protoreflect.FieldNumber = 3 + FileDescriptorProto_PublicDependency_field_number protoreflect.FieldNumber = 10 + FileDescriptorProto_WeakDependency_field_number protoreflect.FieldNumber = 11 + FileDescriptorProto_MessageType_field_number protoreflect.FieldNumber = 4 + FileDescriptorProto_EnumType_field_number protoreflect.FieldNumber = 5 + FileDescriptorProto_Service_field_number protoreflect.FieldNumber = 6 + FileDescriptorProto_Extension_field_number protoreflect.FieldNumber = 7 + FileDescriptorProto_Options_field_number protoreflect.FieldNumber = 8 + FileDescriptorProto_SourceCodeInfo_field_number protoreflect.FieldNumber = 9 + FileDescriptorProto_Syntax_field_number protoreflect.FieldNumber = 12 +) + +// Names for google.protobuf.DescriptorProto. +const ( + DescriptorProto_message_name protoreflect.Name = "DescriptorProto" + DescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.DescriptorProto" +) + +// Field names for google.protobuf.DescriptorProto. +const ( + DescriptorProto_Name_field_name protoreflect.Name = "name" + DescriptorProto_Field_field_name protoreflect.Name = "field" + DescriptorProto_Extension_field_name protoreflect.Name = "extension" + DescriptorProto_NestedType_field_name protoreflect.Name = "nested_type" + DescriptorProto_EnumType_field_name protoreflect.Name = "enum_type" + DescriptorProto_ExtensionRange_field_name protoreflect.Name = "extension_range" + DescriptorProto_OneofDecl_field_name protoreflect.Name = "oneof_decl" + DescriptorProto_Options_field_name protoreflect.Name = "options" + DescriptorProto_ReservedRange_field_name protoreflect.Name = "reserved_range" + DescriptorProto_ReservedName_field_name protoreflect.Name = "reserved_name" + + DescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.name" + DescriptorProto_Field_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.field" + DescriptorProto_Extension_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.extension" + DescriptorProto_NestedType_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.nested_type" + DescriptorProto_EnumType_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.enum_type" + DescriptorProto_ExtensionRange_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.extension_range" + DescriptorProto_OneofDecl_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.oneof_decl" + DescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.options" + DescriptorProto_ReservedRange_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.reserved_range" + DescriptorProto_ReservedName_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.reserved_name" +) + +// Field numbers for google.protobuf.DescriptorProto. 
+const ( + DescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + DescriptorProto_Field_field_number protoreflect.FieldNumber = 2 + DescriptorProto_Extension_field_number protoreflect.FieldNumber = 6 + DescriptorProto_NestedType_field_number protoreflect.FieldNumber = 3 + DescriptorProto_EnumType_field_number protoreflect.FieldNumber = 4 + DescriptorProto_ExtensionRange_field_number protoreflect.FieldNumber = 5 + DescriptorProto_OneofDecl_field_number protoreflect.FieldNumber = 8 + DescriptorProto_Options_field_number protoreflect.FieldNumber = 7 + DescriptorProto_ReservedRange_field_number protoreflect.FieldNumber = 9 + DescriptorProto_ReservedName_field_number protoreflect.FieldNumber = 10 +) + +// Names for google.protobuf.DescriptorProto.ExtensionRange. +const ( + DescriptorProto_ExtensionRange_message_name protoreflect.Name = "ExtensionRange" + DescriptorProto_ExtensionRange_message_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ExtensionRange" +) + +// Field names for google.protobuf.DescriptorProto.ExtensionRange. +const ( + DescriptorProto_ExtensionRange_Start_field_name protoreflect.Name = "start" + DescriptorProto_ExtensionRange_End_field_name protoreflect.Name = "end" + DescriptorProto_ExtensionRange_Options_field_name protoreflect.Name = "options" + + DescriptorProto_ExtensionRange_Start_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ExtensionRange.start" + DescriptorProto_ExtensionRange_End_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ExtensionRange.end" + DescriptorProto_ExtensionRange_Options_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ExtensionRange.options" +) + +// Field numbers for google.protobuf.DescriptorProto.ExtensionRange. +const ( + DescriptorProto_ExtensionRange_Start_field_number protoreflect.FieldNumber = 1 + DescriptorProto_ExtensionRange_End_field_number protoreflect.FieldNumber = 2 + DescriptorProto_ExtensionRange_Options_field_number protoreflect.FieldNumber = 3 +) + +// Names for google.protobuf.DescriptorProto.ReservedRange. +const ( + DescriptorProto_ReservedRange_message_name protoreflect.Name = "ReservedRange" + DescriptorProto_ReservedRange_message_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ReservedRange" +) + +// Field names for google.protobuf.DescriptorProto.ReservedRange. +const ( + DescriptorProto_ReservedRange_Start_field_name protoreflect.Name = "start" + DescriptorProto_ReservedRange_End_field_name protoreflect.Name = "end" + + DescriptorProto_ReservedRange_Start_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ReservedRange.start" + DescriptorProto_ReservedRange_End_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ReservedRange.end" +) + +// Field numbers for google.protobuf.DescriptorProto.ReservedRange. +const ( + DescriptorProto_ReservedRange_Start_field_number protoreflect.FieldNumber = 1 + DescriptorProto_ReservedRange_End_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.ExtensionRangeOptions. +const ( + ExtensionRangeOptions_message_name protoreflect.Name = "ExtensionRangeOptions" + ExtensionRangeOptions_message_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions" +) + +// Field names for google.protobuf.ExtensionRangeOptions. 
+const ( + ExtensionRangeOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + ExtensionRangeOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.ExtensionRangeOptions. +const ( + ExtensionRangeOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.FieldDescriptorProto. +const ( + FieldDescriptorProto_message_name protoreflect.Name = "FieldDescriptorProto" + FieldDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto" +) + +// Field names for google.protobuf.FieldDescriptorProto. +const ( + FieldDescriptorProto_Name_field_name protoreflect.Name = "name" + FieldDescriptorProto_Number_field_name protoreflect.Name = "number" + FieldDescriptorProto_Label_field_name protoreflect.Name = "label" + FieldDescriptorProto_Type_field_name protoreflect.Name = "type" + FieldDescriptorProto_TypeName_field_name protoreflect.Name = "type_name" + FieldDescriptorProto_Extendee_field_name protoreflect.Name = "extendee" + FieldDescriptorProto_DefaultValue_field_name protoreflect.Name = "default_value" + FieldDescriptorProto_OneofIndex_field_name protoreflect.Name = "oneof_index" + FieldDescriptorProto_JsonName_field_name protoreflect.Name = "json_name" + FieldDescriptorProto_Options_field_name protoreflect.Name = "options" + FieldDescriptorProto_Proto3Optional_field_name protoreflect.Name = "proto3_optional" + + FieldDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.name" + FieldDescriptorProto_Number_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.number" + FieldDescriptorProto_Label_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.label" + FieldDescriptorProto_Type_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.type" + FieldDescriptorProto_TypeName_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.type_name" + FieldDescriptorProto_Extendee_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.extendee" + FieldDescriptorProto_DefaultValue_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.default_value" + FieldDescriptorProto_OneofIndex_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.oneof_index" + FieldDescriptorProto_JsonName_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.json_name" + FieldDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.options" + FieldDescriptorProto_Proto3Optional_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.proto3_optional" +) + +// Field numbers for google.protobuf.FieldDescriptorProto. 
+const ( + FieldDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + FieldDescriptorProto_Number_field_number protoreflect.FieldNumber = 3 + FieldDescriptorProto_Label_field_number protoreflect.FieldNumber = 4 + FieldDescriptorProto_Type_field_number protoreflect.FieldNumber = 5 + FieldDescriptorProto_TypeName_field_number protoreflect.FieldNumber = 6 + FieldDescriptorProto_Extendee_field_number protoreflect.FieldNumber = 2 + FieldDescriptorProto_DefaultValue_field_number protoreflect.FieldNumber = 7 + FieldDescriptorProto_OneofIndex_field_number protoreflect.FieldNumber = 9 + FieldDescriptorProto_JsonName_field_number protoreflect.FieldNumber = 10 + FieldDescriptorProto_Options_field_number protoreflect.FieldNumber = 8 + FieldDescriptorProto_Proto3Optional_field_number protoreflect.FieldNumber = 17 +) + +// Full and short names for google.protobuf.FieldDescriptorProto.Type. +const ( + FieldDescriptorProto_Type_enum_fullname = "google.protobuf.FieldDescriptorProto.Type" + FieldDescriptorProto_Type_enum_name = "Type" +) + +// Full and short names for google.protobuf.FieldDescriptorProto.Label. +const ( + FieldDescriptorProto_Label_enum_fullname = "google.protobuf.FieldDescriptorProto.Label" + FieldDescriptorProto_Label_enum_name = "Label" +) + +// Names for google.protobuf.OneofDescriptorProto. +const ( + OneofDescriptorProto_message_name protoreflect.Name = "OneofDescriptorProto" + OneofDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.OneofDescriptorProto" +) + +// Field names for google.protobuf.OneofDescriptorProto. +const ( + OneofDescriptorProto_Name_field_name protoreflect.Name = "name" + OneofDescriptorProto_Options_field_name protoreflect.Name = "options" + + OneofDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.OneofDescriptorProto.name" + OneofDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.OneofDescriptorProto.options" +) + +// Field numbers for google.protobuf.OneofDescriptorProto. +const ( + OneofDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + OneofDescriptorProto_Options_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.EnumDescriptorProto. +const ( + EnumDescriptorProto_message_name protoreflect.Name = "EnumDescriptorProto" + EnumDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto" +) + +// Field names for google.protobuf.EnumDescriptorProto. +const ( + EnumDescriptorProto_Name_field_name protoreflect.Name = "name" + EnumDescriptorProto_Value_field_name protoreflect.Name = "value" + EnumDescriptorProto_Options_field_name protoreflect.Name = "options" + EnumDescriptorProto_ReservedRange_field_name protoreflect.Name = "reserved_range" + EnumDescriptorProto_ReservedName_field_name protoreflect.Name = "reserved_name" + + EnumDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.name" + EnumDescriptorProto_Value_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.value" + EnumDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.options" + EnumDescriptorProto_ReservedRange_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.reserved_range" + EnumDescriptorProto_ReservedName_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.reserved_name" +) + +// Field numbers for google.protobuf.EnumDescriptorProto. 
+const ( + EnumDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + EnumDescriptorProto_Value_field_number protoreflect.FieldNumber = 2 + EnumDescriptorProto_Options_field_number protoreflect.FieldNumber = 3 + EnumDescriptorProto_ReservedRange_field_number protoreflect.FieldNumber = 4 + EnumDescriptorProto_ReservedName_field_number protoreflect.FieldNumber = 5 +) + +// Names for google.protobuf.EnumDescriptorProto.EnumReservedRange. +const ( + EnumDescriptorProto_EnumReservedRange_message_name protoreflect.Name = "EnumReservedRange" + EnumDescriptorProto_EnumReservedRange_message_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.EnumReservedRange" +) + +// Field names for google.protobuf.EnumDescriptorProto.EnumReservedRange. +const ( + EnumDescriptorProto_EnumReservedRange_Start_field_name protoreflect.Name = "start" + EnumDescriptorProto_EnumReservedRange_End_field_name protoreflect.Name = "end" + + EnumDescriptorProto_EnumReservedRange_Start_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.EnumReservedRange.start" + EnumDescriptorProto_EnumReservedRange_End_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.EnumReservedRange.end" +) + +// Field numbers for google.protobuf.EnumDescriptorProto.EnumReservedRange. +const ( + EnumDescriptorProto_EnumReservedRange_Start_field_number protoreflect.FieldNumber = 1 + EnumDescriptorProto_EnumReservedRange_End_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.EnumValueDescriptorProto. +const ( + EnumValueDescriptorProto_message_name protoreflect.Name = "EnumValueDescriptorProto" + EnumValueDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.EnumValueDescriptorProto" +) + +// Field names for google.protobuf.EnumValueDescriptorProto. +const ( + EnumValueDescriptorProto_Name_field_name protoreflect.Name = "name" + EnumValueDescriptorProto_Number_field_name protoreflect.Name = "number" + EnumValueDescriptorProto_Options_field_name protoreflect.Name = "options" + + EnumValueDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.EnumValueDescriptorProto.name" + EnumValueDescriptorProto_Number_field_fullname protoreflect.FullName = "google.protobuf.EnumValueDescriptorProto.number" + EnumValueDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.EnumValueDescriptorProto.options" +) + +// Field numbers for google.protobuf.EnumValueDescriptorProto. +const ( + EnumValueDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + EnumValueDescriptorProto_Number_field_number protoreflect.FieldNumber = 2 + EnumValueDescriptorProto_Options_field_number protoreflect.FieldNumber = 3 +) + +// Names for google.protobuf.ServiceDescriptorProto. +const ( + ServiceDescriptorProto_message_name protoreflect.Name = "ServiceDescriptorProto" + ServiceDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.ServiceDescriptorProto" +) + +// Field names for google.protobuf.ServiceDescriptorProto. 
+const ( + ServiceDescriptorProto_Name_field_name protoreflect.Name = "name" + ServiceDescriptorProto_Method_field_name protoreflect.Name = "method" + ServiceDescriptorProto_Options_field_name protoreflect.Name = "options" + + ServiceDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.ServiceDescriptorProto.name" + ServiceDescriptorProto_Method_field_fullname protoreflect.FullName = "google.protobuf.ServiceDescriptorProto.method" + ServiceDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.ServiceDescriptorProto.options" +) + +// Field numbers for google.protobuf.ServiceDescriptorProto. +const ( + ServiceDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + ServiceDescriptorProto_Method_field_number protoreflect.FieldNumber = 2 + ServiceDescriptorProto_Options_field_number protoreflect.FieldNumber = 3 +) + +// Names for google.protobuf.MethodDescriptorProto. +const ( + MethodDescriptorProto_message_name protoreflect.Name = "MethodDescriptorProto" + MethodDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto" +) + +// Field names for google.protobuf.MethodDescriptorProto. +const ( + MethodDescriptorProto_Name_field_name protoreflect.Name = "name" + MethodDescriptorProto_InputType_field_name protoreflect.Name = "input_type" + MethodDescriptorProto_OutputType_field_name protoreflect.Name = "output_type" + MethodDescriptorProto_Options_field_name protoreflect.Name = "options" + MethodDescriptorProto_ClientStreaming_field_name protoreflect.Name = "client_streaming" + MethodDescriptorProto_ServerStreaming_field_name protoreflect.Name = "server_streaming" + + MethodDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.name" + MethodDescriptorProto_InputType_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.input_type" + MethodDescriptorProto_OutputType_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.output_type" + MethodDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.options" + MethodDescriptorProto_ClientStreaming_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.client_streaming" + MethodDescriptorProto_ServerStreaming_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.server_streaming" +) + +// Field numbers for google.protobuf.MethodDescriptorProto. +const ( + MethodDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + MethodDescriptorProto_InputType_field_number protoreflect.FieldNumber = 2 + MethodDescriptorProto_OutputType_field_number protoreflect.FieldNumber = 3 + MethodDescriptorProto_Options_field_number protoreflect.FieldNumber = 4 + MethodDescriptorProto_ClientStreaming_field_number protoreflect.FieldNumber = 5 + MethodDescriptorProto_ServerStreaming_field_number protoreflect.FieldNumber = 6 +) + +// Names for google.protobuf.FileOptions. +const ( + FileOptions_message_name protoreflect.Name = "FileOptions" + FileOptions_message_fullname protoreflect.FullName = "google.protobuf.FileOptions" +) + +// Field names for google.protobuf.FileOptions. 
+const ( + FileOptions_JavaPackage_field_name protoreflect.Name = "java_package" + FileOptions_JavaOuterClassname_field_name protoreflect.Name = "java_outer_classname" + FileOptions_JavaMultipleFiles_field_name protoreflect.Name = "java_multiple_files" + FileOptions_JavaGenerateEqualsAndHash_field_name protoreflect.Name = "java_generate_equals_and_hash" + FileOptions_JavaStringCheckUtf8_field_name protoreflect.Name = "java_string_check_utf8" + FileOptions_OptimizeFor_field_name protoreflect.Name = "optimize_for" + FileOptions_GoPackage_field_name protoreflect.Name = "go_package" + FileOptions_CcGenericServices_field_name protoreflect.Name = "cc_generic_services" + FileOptions_JavaGenericServices_field_name protoreflect.Name = "java_generic_services" + FileOptions_PyGenericServices_field_name protoreflect.Name = "py_generic_services" + FileOptions_PhpGenericServices_field_name protoreflect.Name = "php_generic_services" + FileOptions_Deprecated_field_name protoreflect.Name = "deprecated" + FileOptions_CcEnableArenas_field_name protoreflect.Name = "cc_enable_arenas" + FileOptions_ObjcClassPrefix_field_name protoreflect.Name = "objc_class_prefix" + FileOptions_CsharpNamespace_field_name protoreflect.Name = "csharp_namespace" + FileOptions_SwiftPrefix_field_name protoreflect.Name = "swift_prefix" + FileOptions_PhpClassPrefix_field_name protoreflect.Name = "php_class_prefix" + FileOptions_PhpNamespace_field_name protoreflect.Name = "php_namespace" + FileOptions_PhpMetadataNamespace_field_name protoreflect.Name = "php_metadata_namespace" + FileOptions_RubyPackage_field_name protoreflect.Name = "ruby_package" + FileOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + FileOptions_JavaPackage_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_package" + FileOptions_JavaOuterClassname_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_outer_classname" + FileOptions_JavaMultipleFiles_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_multiple_files" + FileOptions_JavaGenerateEqualsAndHash_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_generate_equals_and_hash" + FileOptions_JavaStringCheckUtf8_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_string_check_utf8" + FileOptions_OptimizeFor_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.optimize_for" + FileOptions_GoPackage_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.go_package" + FileOptions_CcGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.cc_generic_services" + FileOptions_JavaGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_generic_services" + FileOptions_PyGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.py_generic_services" + FileOptions_PhpGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_generic_services" + FileOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.deprecated" + FileOptions_CcEnableArenas_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.cc_enable_arenas" + FileOptions_ObjcClassPrefix_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.objc_class_prefix" + FileOptions_CsharpNamespace_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.csharp_namespace" + FileOptions_SwiftPrefix_field_fullname protoreflect.FullName 
= "google.protobuf.FileOptions.swift_prefix" + FileOptions_PhpClassPrefix_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_class_prefix" + FileOptions_PhpNamespace_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_namespace" + FileOptions_PhpMetadataNamespace_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_metadata_namespace" + FileOptions_RubyPackage_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.ruby_package" + FileOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.FileOptions. +const ( + FileOptions_JavaPackage_field_number protoreflect.FieldNumber = 1 + FileOptions_JavaOuterClassname_field_number protoreflect.FieldNumber = 8 + FileOptions_JavaMultipleFiles_field_number protoreflect.FieldNumber = 10 + FileOptions_JavaGenerateEqualsAndHash_field_number protoreflect.FieldNumber = 20 + FileOptions_JavaStringCheckUtf8_field_number protoreflect.FieldNumber = 27 + FileOptions_OptimizeFor_field_number protoreflect.FieldNumber = 9 + FileOptions_GoPackage_field_number protoreflect.FieldNumber = 11 + FileOptions_CcGenericServices_field_number protoreflect.FieldNumber = 16 + FileOptions_JavaGenericServices_field_number protoreflect.FieldNumber = 17 + FileOptions_PyGenericServices_field_number protoreflect.FieldNumber = 18 + FileOptions_PhpGenericServices_field_number protoreflect.FieldNumber = 42 + FileOptions_Deprecated_field_number protoreflect.FieldNumber = 23 + FileOptions_CcEnableArenas_field_number protoreflect.FieldNumber = 31 + FileOptions_ObjcClassPrefix_field_number protoreflect.FieldNumber = 36 + FileOptions_CsharpNamespace_field_number protoreflect.FieldNumber = 37 + FileOptions_SwiftPrefix_field_number protoreflect.FieldNumber = 39 + FileOptions_PhpClassPrefix_field_number protoreflect.FieldNumber = 40 + FileOptions_PhpNamespace_field_number protoreflect.FieldNumber = 41 + FileOptions_PhpMetadataNamespace_field_number protoreflect.FieldNumber = 44 + FileOptions_RubyPackage_field_number protoreflect.FieldNumber = 45 + FileOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Full and short names for google.protobuf.FileOptions.OptimizeMode. +const ( + FileOptions_OptimizeMode_enum_fullname = "google.protobuf.FileOptions.OptimizeMode" + FileOptions_OptimizeMode_enum_name = "OptimizeMode" +) + +// Names for google.protobuf.MessageOptions. +const ( + MessageOptions_message_name protoreflect.Name = "MessageOptions" + MessageOptions_message_fullname protoreflect.FullName = "google.protobuf.MessageOptions" +) + +// Field names for google.protobuf.MessageOptions. 
+const ( + MessageOptions_MessageSetWireFormat_field_name protoreflect.Name = "message_set_wire_format" + MessageOptions_NoStandardDescriptorAccessor_field_name protoreflect.Name = "no_standard_descriptor_accessor" + MessageOptions_Deprecated_field_name protoreflect.Name = "deprecated" + MessageOptions_MapEntry_field_name protoreflect.Name = "map_entry" + MessageOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + MessageOptions_MessageSetWireFormat_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.message_set_wire_format" + MessageOptions_NoStandardDescriptorAccessor_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.no_standard_descriptor_accessor" + MessageOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.deprecated" + MessageOptions_MapEntry_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.map_entry" + MessageOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.MessageOptions. +const ( + MessageOptions_MessageSetWireFormat_field_number protoreflect.FieldNumber = 1 + MessageOptions_NoStandardDescriptorAccessor_field_number protoreflect.FieldNumber = 2 + MessageOptions_Deprecated_field_number protoreflect.FieldNumber = 3 + MessageOptions_MapEntry_field_number protoreflect.FieldNumber = 7 + MessageOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.FieldOptions. +const ( + FieldOptions_message_name protoreflect.Name = "FieldOptions" + FieldOptions_message_fullname protoreflect.FullName = "google.protobuf.FieldOptions" +) + +// Field names for google.protobuf.FieldOptions. +const ( + FieldOptions_Ctype_field_name protoreflect.Name = "ctype" + FieldOptions_Packed_field_name protoreflect.Name = "packed" + FieldOptions_Jstype_field_name protoreflect.Name = "jstype" + FieldOptions_Lazy_field_name protoreflect.Name = "lazy" + FieldOptions_Deprecated_field_name protoreflect.Name = "deprecated" + FieldOptions_Weak_field_name protoreflect.Name = "weak" + FieldOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + FieldOptions_Ctype_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.ctype" + FieldOptions_Packed_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.packed" + FieldOptions_Jstype_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.jstype" + FieldOptions_Lazy_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.lazy" + FieldOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.deprecated" + FieldOptions_Weak_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.weak" + FieldOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.FieldOptions. 
+const ( + FieldOptions_Ctype_field_number protoreflect.FieldNumber = 1 + FieldOptions_Packed_field_number protoreflect.FieldNumber = 2 + FieldOptions_Jstype_field_number protoreflect.FieldNumber = 6 + FieldOptions_Lazy_field_number protoreflect.FieldNumber = 5 + FieldOptions_Deprecated_field_number protoreflect.FieldNumber = 3 + FieldOptions_Weak_field_number protoreflect.FieldNumber = 10 + FieldOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Full and short names for google.protobuf.FieldOptions.CType. +const ( + FieldOptions_CType_enum_fullname = "google.protobuf.FieldOptions.CType" + FieldOptions_CType_enum_name = "CType" +) + +// Full and short names for google.protobuf.FieldOptions.JSType. +const ( + FieldOptions_JSType_enum_fullname = "google.protobuf.FieldOptions.JSType" + FieldOptions_JSType_enum_name = "JSType" +) + +// Names for google.protobuf.OneofOptions. +const ( + OneofOptions_message_name protoreflect.Name = "OneofOptions" + OneofOptions_message_fullname protoreflect.FullName = "google.protobuf.OneofOptions" +) + +// Field names for google.protobuf.OneofOptions. +const ( + OneofOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + OneofOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.OneofOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.OneofOptions. +const ( + OneofOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.EnumOptions. +const ( + EnumOptions_message_name protoreflect.Name = "EnumOptions" + EnumOptions_message_fullname protoreflect.FullName = "google.protobuf.EnumOptions" +) + +// Field names for google.protobuf.EnumOptions. +const ( + EnumOptions_AllowAlias_field_name protoreflect.Name = "allow_alias" + EnumOptions_Deprecated_field_name protoreflect.Name = "deprecated" + EnumOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + EnumOptions_AllowAlias_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.allow_alias" + EnumOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.deprecated" + EnumOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.EnumOptions. +const ( + EnumOptions_AllowAlias_field_number protoreflect.FieldNumber = 2 + EnumOptions_Deprecated_field_number protoreflect.FieldNumber = 3 + EnumOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.EnumValueOptions. +const ( + EnumValueOptions_message_name protoreflect.Name = "EnumValueOptions" + EnumValueOptions_message_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions" +) + +// Field names for google.protobuf.EnumValueOptions. +const ( + EnumValueOptions_Deprecated_field_name protoreflect.Name = "deprecated" + EnumValueOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + EnumValueOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions.deprecated" + EnumValueOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.EnumValueOptions. 
+const ( + EnumValueOptions_Deprecated_field_number protoreflect.FieldNumber = 1 + EnumValueOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.ServiceOptions. +const ( + ServiceOptions_message_name protoreflect.Name = "ServiceOptions" + ServiceOptions_message_fullname protoreflect.FullName = "google.protobuf.ServiceOptions" +) + +// Field names for google.protobuf.ServiceOptions. +const ( + ServiceOptions_Deprecated_field_name protoreflect.Name = "deprecated" + ServiceOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + ServiceOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.ServiceOptions.deprecated" + ServiceOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.ServiceOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.ServiceOptions. +const ( + ServiceOptions_Deprecated_field_number protoreflect.FieldNumber = 33 + ServiceOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.MethodOptions. +const ( + MethodOptions_message_name protoreflect.Name = "MethodOptions" + MethodOptions_message_fullname protoreflect.FullName = "google.protobuf.MethodOptions" +) + +// Field names for google.protobuf.MethodOptions. +const ( + MethodOptions_Deprecated_field_name protoreflect.Name = "deprecated" + MethodOptions_IdempotencyLevel_field_name protoreflect.Name = "idempotency_level" + MethodOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + MethodOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.deprecated" + MethodOptions_IdempotencyLevel_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.idempotency_level" + MethodOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.MethodOptions. +const ( + MethodOptions_Deprecated_field_number protoreflect.FieldNumber = 33 + MethodOptions_IdempotencyLevel_field_number protoreflect.FieldNumber = 34 + MethodOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Full and short names for google.protobuf.MethodOptions.IdempotencyLevel. +const ( + MethodOptions_IdempotencyLevel_enum_fullname = "google.protobuf.MethodOptions.IdempotencyLevel" + MethodOptions_IdempotencyLevel_enum_name = "IdempotencyLevel" +) + +// Names for google.protobuf.UninterpretedOption. +const ( + UninterpretedOption_message_name protoreflect.Name = "UninterpretedOption" + UninterpretedOption_message_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption" +) + +// Field names for google.protobuf.UninterpretedOption. 
+const ( + UninterpretedOption_Name_field_name protoreflect.Name = "name" + UninterpretedOption_IdentifierValue_field_name protoreflect.Name = "identifier_value" + UninterpretedOption_PositiveIntValue_field_name protoreflect.Name = "positive_int_value" + UninterpretedOption_NegativeIntValue_field_name protoreflect.Name = "negative_int_value" + UninterpretedOption_DoubleValue_field_name protoreflect.Name = "double_value" + UninterpretedOption_StringValue_field_name protoreflect.Name = "string_value" + UninterpretedOption_AggregateValue_field_name protoreflect.Name = "aggregate_value" + + UninterpretedOption_Name_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.name" + UninterpretedOption_IdentifierValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.identifier_value" + UninterpretedOption_PositiveIntValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.positive_int_value" + UninterpretedOption_NegativeIntValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.negative_int_value" + UninterpretedOption_DoubleValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.double_value" + UninterpretedOption_StringValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.string_value" + UninterpretedOption_AggregateValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.aggregate_value" +) + +// Field numbers for google.protobuf.UninterpretedOption. +const ( + UninterpretedOption_Name_field_number protoreflect.FieldNumber = 2 + UninterpretedOption_IdentifierValue_field_number protoreflect.FieldNumber = 3 + UninterpretedOption_PositiveIntValue_field_number protoreflect.FieldNumber = 4 + UninterpretedOption_NegativeIntValue_field_number protoreflect.FieldNumber = 5 + UninterpretedOption_DoubleValue_field_number protoreflect.FieldNumber = 6 + UninterpretedOption_StringValue_field_number protoreflect.FieldNumber = 7 + UninterpretedOption_AggregateValue_field_number protoreflect.FieldNumber = 8 +) + +// Names for google.protobuf.UninterpretedOption.NamePart. +const ( + UninterpretedOption_NamePart_message_name protoreflect.Name = "NamePart" + UninterpretedOption_NamePart_message_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.NamePart" +) + +// Field names for google.protobuf.UninterpretedOption.NamePart. +const ( + UninterpretedOption_NamePart_NamePart_field_name protoreflect.Name = "name_part" + UninterpretedOption_NamePart_IsExtension_field_name protoreflect.Name = "is_extension" + + UninterpretedOption_NamePart_NamePart_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.NamePart.name_part" + UninterpretedOption_NamePart_IsExtension_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.NamePart.is_extension" +) + +// Field numbers for google.protobuf.UninterpretedOption.NamePart. +const ( + UninterpretedOption_NamePart_NamePart_field_number protoreflect.FieldNumber = 1 + UninterpretedOption_NamePart_IsExtension_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.SourceCodeInfo. +const ( + SourceCodeInfo_message_name protoreflect.Name = "SourceCodeInfo" + SourceCodeInfo_message_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo" +) + +// Field names for google.protobuf.SourceCodeInfo. 
+const ( + SourceCodeInfo_Location_field_name protoreflect.Name = "location" + + SourceCodeInfo_Location_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.location" +) + +// Field numbers for google.protobuf.SourceCodeInfo. +const ( + SourceCodeInfo_Location_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.SourceCodeInfo.Location. +const ( + SourceCodeInfo_Location_message_name protoreflect.Name = "Location" + SourceCodeInfo_Location_message_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location" +) + +// Field names for google.protobuf.SourceCodeInfo.Location. +const ( + SourceCodeInfo_Location_Path_field_name protoreflect.Name = "path" + SourceCodeInfo_Location_Span_field_name protoreflect.Name = "span" + SourceCodeInfo_Location_LeadingComments_field_name protoreflect.Name = "leading_comments" + SourceCodeInfo_Location_TrailingComments_field_name protoreflect.Name = "trailing_comments" + SourceCodeInfo_Location_LeadingDetachedComments_field_name protoreflect.Name = "leading_detached_comments" + + SourceCodeInfo_Location_Path_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.path" + SourceCodeInfo_Location_Span_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.span" + SourceCodeInfo_Location_LeadingComments_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.leading_comments" + SourceCodeInfo_Location_TrailingComments_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.trailing_comments" + SourceCodeInfo_Location_LeadingDetachedComments_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.leading_detached_comments" +) + +// Field numbers for google.protobuf.SourceCodeInfo.Location. +const ( + SourceCodeInfo_Location_Path_field_number protoreflect.FieldNumber = 1 + SourceCodeInfo_Location_Span_field_number protoreflect.FieldNumber = 2 + SourceCodeInfo_Location_LeadingComments_field_number protoreflect.FieldNumber = 3 + SourceCodeInfo_Location_TrailingComments_field_number protoreflect.FieldNumber = 4 + SourceCodeInfo_Location_LeadingDetachedComments_field_number protoreflect.FieldNumber = 6 +) + +// Names for google.protobuf.GeneratedCodeInfo. +const ( + GeneratedCodeInfo_message_name protoreflect.Name = "GeneratedCodeInfo" + GeneratedCodeInfo_message_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo" +) + +// Field names for google.protobuf.GeneratedCodeInfo. +const ( + GeneratedCodeInfo_Annotation_field_name protoreflect.Name = "annotation" + + GeneratedCodeInfo_Annotation_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.annotation" +) + +// Field numbers for google.protobuf.GeneratedCodeInfo. +const ( + GeneratedCodeInfo_Annotation_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.GeneratedCodeInfo.Annotation. +const ( + GeneratedCodeInfo_Annotation_message_name protoreflect.Name = "Annotation" + GeneratedCodeInfo_Annotation_message_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation" +) + +// Field names for google.protobuf.GeneratedCodeInfo.Annotation. 
+const ( + GeneratedCodeInfo_Annotation_Path_field_name protoreflect.Name = "path" + GeneratedCodeInfo_Annotation_SourceFile_field_name protoreflect.Name = "source_file" + GeneratedCodeInfo_Annotation_Begin_field_name protoreflect.Name = "begin" + GeneratedCodeInfo_Annotation_End_field_name protoreflect.Name = "end" + + GeneratedCodeInfo_Annotation_Path_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation.path" + GeneratedCodeInfo_Annotation_SourceFile_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation.source_file" + GeneratedCodeInfo_Annotation_Begin_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation.begin" + GeneratedCodeInfo_Annotation_End_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation.end" +) + +// Field numbers for google.protobuf.GeneratedCodeInfo.Annotation. +const ( + GeneratedCodeInfo_Annotation_Path_field_number protoreflect.FieldNumber = 1 + GeneratedCodeInfo_Annotation_SourceFile_field_number protoreflect.FieldNumber = 2 + GeneratedCodeInfo_Annotation_Begin_field_number protoreflect.FieldNumber = 3 + GeneratedCodeInfo_Annotation_End_field_number protoreflect.FieldNumber = 4 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/doc.go b/vendor/google.golang.org/protobuf/internal/genid/doc.go new file mode 100644 index 00000000000..45ccd01211c --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/doc.go @@ -0,0 +1,11 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package genid contains constants for declarations in descriptor.proto +// and the well-known types. +package genid + +import protoreflect "google.golang.org/protobuf/reflect/protoreflect" + +const GoogleProtobuf_package protoreflect.FullName = "google.protobuf" diff --git a/vendor/google.golang.org/protobuf/internal/genid/duration_gen.go b/vendor/google.golang.org/protobuf/internal/genid/duration_gen.go new file mode 100644 index 00000000000..b070ef4fd69 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/duration_gen.go @@ -0,0 +1,34 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_duration_proto = "google/protobuf/duration.proto" + +// Names for google.protobuf.Duration. +const ( + Duration_message_name protoreflect.Name = "Duration" + Duration_message_fullname protoreflect.FullName = "google.protobuf.Duration" +) + +// Field names for google.protobuf.Duration. +const ( + Duration_Seconds_field_name protoreflect.Name = "seconds" + Duration_Nanos_field_name protoreflect.Name = "nanos" + + Duration_Seconds_field_fullname protoreflect.FullName = "google.protobuf.Duration.seconds" + Duration_Nanos_field_fullname protoreflect.FullName = "google.protobuf.Duration.nanos" +) + +// Field numbers for google.protobuf.Duration. 
+const ( + Duration_Seconds_field_number protoreflect.FieldNumber = 1 + Duration_Nanos_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/empty_gen.go b/vendor/google.golang.org/protobuf/internal/genid/empty_gen.go new file mode 100644 index 00000000000..762abb34a4e --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/empty_gen.go @@ -0,0 +1,19 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_empty_proto = "google/protobuf/empty.proto" + +// Names for google.protobuf.Empty. +const ( + Empty_message_name protoreflect.Name = "Empty" + Empty_message_fullname protoreflect.FullName = "google.protobuf.Empty" +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/field_mask_gen.go b/vendor/google.golang.org/protobuf/internal/genid/field_mask_gen.go new file mode 100644 index 00000000000..70bed453fee --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/field_mask_gen.go @@ -0,0 +1,31 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_field_mask_proto = "google/protobuf/field_mask.proto" + +// Names for google.protobuf.FieldMask. +const ( + FieldMask_message_name protoreflect.Name = "FieldMask" + FieldMask_message_fullname protoreflect.FullName = "google.protobuf.FieldMask" +) + +// Field names for google.protobuf.FieldMask. +const ( + FieldMask_Paths_field_name protoreflect.Name = "paths" + + FieldMask_Paths_field_fullname protoreflect.FullName = "google.protobuf.FieldMask.paths" +) + +// Field numbers for google.protobuf.FieldMask. +const ( + FieldMask_Paths_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/goname.go b/vendor/google.golang.org/protobuf/internal/genid/goname.go new file mode 100644 index 00000000000..693d2e9e1fe --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/goname.go @@ -0,0 +1,25 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package genid + +// Go names of implementation-specific struct fields in generated messages. +const ( + State_goname = "state" + + SizeCache_goname = "sizeCache" + SizeCacheA_goname = "XXX_sizecache" + + WeakFields_goname = "weakFields" + WeakFieldsA_goname = "XXX_weak" + + UnknownFields_goname = "unknownFields" + UnknownFieldsA_goname = "XXX_unrecognized" + + ExtensionFields_goname = "extensionFields" + ExtensionFieldsA_goname = "XXX_InternalExtensions" + ExtensionFieldsB_goname = "XXX_extensions" + + WeakFieldPrefix_goname = "XXX_weak_" +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/map_entry.go b/vendor/google.golang.org/protobuf/internal/genid/map_entry.go new file mode 100644 index 00000000000..8f9ea02ff2a --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/map_entry.go @@ -0,0 +1,16 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package genid + +import protoreflect "google.golang.org/protobuf/reflect/protoreflect" + +// Generic field names and numbers for synthetic map entry messages. +const ( + MapEntry_Key_field_name protoreflect.Name = "key" + MapEntry_Value_field_name protoreflect.Name = "value" + + MapEntry_Key_field_number protoreflect.FieldNumber = 1 + MapEntry_Value_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/source_context_gen.go b/vendor/google.golang.org/protobuf/internal/genid/source_context_gen.go new file mode 100644 index 00000000000..3e99ae16c84 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/source_context_gen.go @@ -0,0 +1,31 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_source_context_proto = "google/protobuf/source_context.proto" + +// Names for google.protobuf.SourceContext. +const ( + SourceContext_message_name protoreflect.Name = "SourceContext" + SourceContext_message_fullname protoreflect.FullName = "google.protobuf.SourceContext" +) + +// Field names for google.protobuf.SourceContext. +const ( + SourceContext_FileName_field_name protoreflect.Name = "file_name" + + SourceContext_FileName_field_fullname protoreflect.FullName = "google.protobuf.SourceContext.file_name" +) + +// Field numbers for google.protobuf.SourceContext. +const ( + SourceContext_FileName_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/struct_gen.go b/vendor/google.golang.org/protobuf/internal/genid/struct_gen.go new file mode 100644 index 00000000000..1a38944b26e --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/struct_gen.go @@ -0,0 +1,116 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_struct_proto = "google/protobuf/struct.proto" + +// Full and short names for google.protobuf.NullValue. +const ( + NullValue_enum_fullname = "google.protobuf.NullValue" + NullValue_enum_name = "NullValue" +) + +// Names for google.protobuf.Struct. +const ( + Struct_message_name protoreflect.Name = "Struct" + Struct_message_fullname protoreflect.FullName = "google.protobuf.Struct" +) + +// Field names for google.protobuf.Struct. +const ( + Struct_Fields_field_name protoreflect.Name = "fields" + + Struct_Fields_field_fullname protoreflect.FullName = "google.protobuf.Struct.fields" +) + +// Field numbers for google.protobuf.Struct. +const ( + Struct_Fields_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.Struct.FieldsEntry. +const ( + Struct_FieldsEntry_message_name protoreflect.Name = "FieldsEntry" + Struct_FieldsEntry_message_fullname protoreflect.FullName = "google.protobuf.Struct.FieldsEntry" +) + +// Field names for google.protobuf.Struct.FieldsEntry. 
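[Editorial aside, not part of the patch: the genid.MapEntry_* constants added just above are what later hunks in this patch (codec_map.go, proto/decode.go, protodesc/desc_validate.go) substitute for the hard-coded map-entry field numbers 1 and 2. Since genid is an internal package of google.golang.org/protobuf and cannot be imported from outside that module, this minimal standalone sketch mirrors the constants locally to show the pattern.]

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
)

// Local stand-ins for genid.MapEntry_Key_field_number and
// genid.MapEntry_Value_field_number introduced by this patch.
const (
	mapEntryKeyFieldNumber   protoreflect.FieldNumber = 1
	mapEntryValueFieldNumber protoreflect.FieldNumber = 2
)

// describeMapEntryField switches on named constants instead of magic
// numbers, the same refactor applied to the map decoders in this patch.
func describeMapEntryField(num protoreflect.FieldNumber) string {
	switch num {
	case mapEntryKeyFieldNumber:
		return "key"
	case mapEntryValueFieldNumber:
		return "value"
	default:
		return "unknown"
	}
}

func main() {
	fmt.Println(describeMapEntryField(1)) // key
	fmt.Println(describeMapEntryField(2)) // value
}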
+const ( + Struct_FieldsEntry_Key_field_name protoreflect.Name = "key" + Struct_FieldsEntry_Value_field_name protoreflect.Name = "value" + + Struct_FieldsEntry_Key_field_fullname protoreflect.FullName = "google.protobuf.Struct.FieldsEntry.key" + Struct_FieldsEntry_Value_field_fullname protoreflect.FullName = "google.protobuf.Struct.FieldsEntry.value" +) + +// Field numbers for google.protobuf.Struct.FieldsEntry. +const ( + Struct_FieldsEntry_Key_field_number protoreflect.FieldNumber = 1 + Struct_FieldsEntry_Value_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.Value. +const ( + Value_message_name protoreflect.Name = "Value" + Value_message_fullname protoreflect.FullName = "google.protobuf.Value" +) + +// Field names for google.protobuf.Value. +const ( + Value_NullValue_field_name protoreflect.Name = "null_value" + Value_NumberValue_field_name protoreflect.Name = "number_value" + Value_StringValue_field_name protoreflect.Name = "string_value" + Value_BoolValue_field_name protoreflect.Name = "bool_value" + Value_StructValue_field_name protoreflect.Name = "struct_value" + Value_ListValue_field_name protoreflect.Name = "list_value" + + Value_NullValue_field_fullname protoreflect.FullName = "google.protobuf.Value.null_value" + Value_NumberValue_field_fullname protoreflect.FullName = "google.protobuf.Value.number_value" + Value_StringValue_field_fullname protoreflect.FullName = "google.protobuf.Value.string_value" + Value_BoolValue_field_fullname protoreflect.FullName = "google.protobuf.Value.bool_value" + Value_StructValue_field_fullname protoreflect.FullName = "google.protobuf.Value.struct_value" + Value_ListValue_field_fullname protoreflect.FullName = "google.protobuf.Value.list_value" +) + +// Field numbers for google.protobuf.Value. +const ( + Value_NullValue_field_number protoreflect.FieldNumber = 1 + Value_NumberValue_field_number protoreflect.FieldNumber = 2 + Value_StringValue_field_number protoreflect.FieldNumber = 3 + Value_BoolValue_field_number protoreflect.FieldNumber = 4 + Value_StructValue_field_number protoreflect.FieldNumber = 5 + Value_ListValue_field_number protoreflect.FieldNumber = 6 +) + +// Oneof names for google.protobuf.Value. +const ( + Value_Kind_oneof_name protoreflect.Name = "kind" + + Value_Kind_oneof_fullname protoreflect.FullName = "google.protobuf.Value.kind" +) + +// Names for google.protobuf.ListValue. +const ( + ListValue_message_name protoreflect.Name = "ListValue" + ListValue_message_fullname protoreflect.FullName = "google.protobuf.ListValue" +) + +// Field names for google.protobuf.ListValue. +const ( + ListValue_Values_field_name protoreflect.Name = "values" + + ListValue_Values_field_fullname protoreflect.FullName = "google.protobuf.ListValue.values" +) + +// Field numbers for google.protobuf.ListValue. +const ( + ListValue_Values_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/timestamp_gen.go b/vendor/google.golang.org/protobuf/internal/genid/timestamp_gen.go new file mode 100644 index 00000000000..f5cd5634c2e --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/timestamp_gen.go @@ -0,0 +1,34 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. 
+ +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_timestamp_proto = "google/protobuf/timestamp.proto" + +// Names for google.protobuf.Timestamp. +const ( + Timestamp_message_name protoreflect.Name = "Timestamp" + Timestamp_message_fullname protoreflect.FullName = "google.protobuf.Timestamp" +) + +// Field names for google.protobuf.Timestamp. +const ( + Timestamp_Seconds_field_name protoreflect.Name = "seconds" + Timestamp_Nanos_field_name protoreflect.Name = "nanos" + + Timestamp_Seconds_field_fullname protoreflect.FullName = "google.protobuf.Timestamp.seconds" + Timestamp_Nanos_field_fullname protoreflect.FullName = "google.protobuf.Timestamp.nanos" +) + +// Field numbers for google.protobuf.Timestamp. +const ( + Timestamp_Seconds_field_number protoreflect.FieldNumber = 1 + Timestamp_Nanos_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/type_gen.go b/vendor/google.golang.org/protobuf/internal/genid/type_gen.go new file mode 100644 index 00000000000..3bc710138ad --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/type_gen.go @@ -0,0 +1,184 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_type_proto = "google/protobuf/type.proto" + +// Full and short names for google.protobuf.Syntax. +const ( + Syntax_enum_fullname = "google.protobuf.Syntax" + Syntax_enum_name = "Syntax" +) + +// Names for google.protobuf.Type. +const ( + Type_message_name protoreflect.Name = "Type" + Type_message_fullname protoreflect.FullName = "google.protobuf.Type" +) + +// Field names for google.protobuf.Type. +const ( + Type_Name_field_name protoreflect.Name = "name" + Type_Fields_field_name protoreflect.Name = "fields" + Type_Oneofs_field_name protoreflect.Name = "oneofs" + Type_Options_field_name protoreflect.Name = "options" + Type_SourceContext_field_name protoreflect.Name = "source_context" + Type_Syntax_field_name protoreflect.Name = "syntax" + + Type_Name_field_fullname protoreflect.FullName = "google.protobuf.Type.name" + Type_Fields_field_fullname protoreflect.FullName = "google.protobuf.Type.fields" + Type_Oneofs_field_fullname protoreflect.FullName = "google.protobuf.Type.oneofs" + Type_Options_field_fullname protoreflect.FullName = "google.protobuf.Type.options" + Type_SourceContext_field_fullname protoreflect.FullName = "google.protobuf.Type.source_context" + Type_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Type.syntax" +) + +// Field numbers for google.protobuf.Type. +const ( + Type_Name_field_number protoreflect.FieldNumber = 1 + Type_Fields_field_number protoreflect.FieldNumber = 2 + Type_Oneofs_field_number protoreflect.FieldNumber = 3 + Type_Options_field_number protoreflect.FieldNumber = 4 + Type_SourceContext_field_number protoreflect.FieldNumber = 5 + Type_Syntax_field_number protoreflect.FieldNumber = 6 +) + +// Names for google.protobuf.Field. +const ( + Field_message_name protoreflect.Name = "Field" + Field_message_fullname protoreflect.FullName = "google.protobuf.Field" +) + +// Field names for google.protobuf.Field. 
+const ( + Field_Kind_field_name protoreflect.Name = "kind" + Field_Cardinality_field_name protoreflect.Name = "cardinality" + Field_Number_field_name protoreflect.Name = "number" + Field_Name_field_name protoreflect.Name = "name" + Field_TypeUrl_field_name protoreflect.Name = "type_url" + Field_OneofIndex_field_name protoreflect.Name = "oneof_index" + Field_Packed_field_name protoreflect.Name = "packed" + Field_Options_field_name protoreflect.Name = "options" + Field_JsonName_field_name protoreflect.Name = "json_name" + Field_DefaultValue_field_name protoreflect.Name = "default_value" + + Field_Kind_field_fullname protoreflect.FullName = "google.protobuf.Field.kind" + Field_Cardinality_field_fullname protoreflect.FullName = "google.protobuf.Field.cardinality" + Field_Number_field_fullname protoreflect.FullName = "google.protobuf.Field.number" + Field_Name_field_fullname protoreflect.FullName = "google.protobuf.Field.name" + Field_TypeUrl_field_fullname protoreflect.FullName = "google.protobuf.Field.type_url" + Field_OneofIndex_field_fullname protoreflect.FullName = "google.protobuf.Field.oneof_index" + Field_Packed_field_fullname protoreflect.FullName = "google.protobuf.Field.packed" + Field_Options_field_fullname protoreflect.FullName = "google.protobuf.Field.options" + Field_JsonName_field_fullname protoreflect.FullName = "google.protobuf.Field.json_name" + Field_DefaultValue_field_fullname protoreflect.FullName = "google.protobuf.Field.default_value" +) + +// Field numbers for google.protobuf.Field. +const ( + Field_Kind_field_number protoreflect.FieldNumber = 1 + Field_Cardinality_field_number protoreflect.FieldNumber = 2 + Field_Number_field_number protoreflect.FieldNumber = 3 + Field_Name_field_number protoreflect.FieldNumber = 4 + Field_TypeUrl_field_number protoreflect.FieldNumber = 6 + Field_OneofIndex_field_number protoreflect.FieldNumber = 7 + Field_Packed_field_number protoreflect.FieldNumber = 8 + Field_Options_field_number protoreflect.FieldNumber = 9 + Field_JsonName_field_number protoreflect.FieldNumber = 10 + Field_DefaultValue_field_number protoreflect.FieldNumber = 11 +) + +// Full and short names for google.protobuf.Field.Kind. +const ( + Field_Kind_enum_fullname = "google.protobuf.Field.Kind" + Field_Kind_enum_name = "Kind" +) + +// Full and short names for google.protobuf.Field.Cardinality. +const ( + Field_Cardinality_enum_fullname = "google.protobuf.Field.Cardinality" + Field_Cardinality_enum_name = "Cardinality" +) + +// Names for google.protobuf.Enum. +const ( + Enum_message_name protoreflect.Name = "Enum" + Enum_message_fullname protoreflect.FullName = "google.protobuf.Enum" +) + +// Field names for google.protobuf.Enum. +const ( + Enum_Name_field_name protoreflect.Name = "name" + Enum_Enumvalue_field_name protoreflect.Name = "enumvalue" + Enum_Options_field_name protoreflect.Name = "options" + Enum_SourceContext_field_name protoreflect.Name = "source_context" + Enum_Syntax_field_name protoreflect.Name = "syntax" + + Enum_Name_field_fullname protoreflect.FullName = "google.protobuf.Enum.name" + Enum_Enumvalue_field_fullname protoreflect.FullName = "google.protobuf.Enum.enumvalue" + Enum_Options_field_fullname protoreflect.FullName = "google.protobuf.Enum.options" + Enum_SourceContext_field_fullname protoreflect.FullName = "google.protobuf.Enum.source_context" + Enum_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Enum.syntax" +) + +// Field numbers for google.protobuf.Enum. 
+const ( + Enum_Name_field_number protoreflect.FieldNumber = 1 + Enum_Enumvalue_field_number protoreflect.FieldNumber = 2 + Enum_Options_field_number protoreflect.FieldNumber = 3 + Enum_SourceContext_field_number protoreflect.FieldNumber = 4 + Enum_Syntax_field_number protoreflect.FieldNumber = 5 +) + +// Names for google.protobuf.EnumValue. +const ( + EnumValue_message_name protoreflect.Name = "EnumValue" + EnumValue_message_fullname protoreflect.FullName = "google.protobuf.EnumValue" +) + +// Field names for google.protobuf.EnumValue. +const ( + EnumValue_Name_field_name protoreflect.Name = "name" + EnumValue_Number_field_name protoreflect.Name = "number" + EnumValue_Options_field_name protoreflect.Name = "options" + + EnumValue_Name_field_fullname protoreflect.FullName = "google.protobuf.EnumValue.name" + EnumValue_Number_field_fullname protoreflect.FullName = "google.protobuf.EnumValue.number" + EnumValue_Options_field_fullname protoreflect.FullName = "google.protobuf.EnumValue.options" +) + +// Field numbers for google.protobuf.EnumValue. +const ( + EnumValue_Name_field_number protoreflect.FieldNumber = 1 + EnumValue_Number_field_number protoreflect.FieldNumber = 2 + EnumValue_Options_field_number protoreflect.FieldNumber = 3 +) + +// Names for google.protobuf.Option. +const ( + Option_message_name protoreflect.Name = "Option" + Option_message_fullname protoreflect.FullName = "google.protobuf.Option" +) + +// Field names for google.protobuf.Option. +const ( + Option_Name_field_name protoreflect.Name = "name" + Option_Value_field_name protoreflect.Name = "value" + + Option_Name_field_fullname protoreflect.FullName = "google.protobuf.Option.name" + Option_Value_field_fullname protoreflect.FullName = "google.protobuf.Option.value" +) + +// Field numbers for google.protobuf.Option. +const ( + Option_Name_field_number protoreflect.FieldNumber = 1 + Option_Value_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/wrappers.go b/vendor/google.golang.org/protobuf/internal/genid/wrappers.go new file mode 100644 index 00000000000..429384b85b0 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/wrappers.go @@ -0,0 +1,13 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package genid + +import protoreflect "google.golang.org/protobuf/reflect/protoreflect" + +// Generic field name and number for messages in wrappers.proto. +const ( + WrapperValue_Value_field_name protoreflect.Name = "value" + WrapperValue_Value_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/wrappers_gen.go b/vendor/google.golang.org/protobuf/internal/genid/wrappers_gen.go new file mode 100644 index 00000000000..72527d2ab03 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/wrappers_gen.go @@ -0,0 +1,175 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_wrappers_proto = "google/protobuf/wrappers.proto" + +// Names for google.protobuf.DoubleValue. 
+const ( + DoubleValue_message_name protoreflect.Name = "DoubleValue" + DoubleValue_message_fullname protoreflect.FullName = "google.protobuf.DoubleValue" +) + +// Field names for google.protobuf.DoubleValue. +const ( + DoubleValue_Value_field_name protoreflect.Name = "value" + + DoubleValue_Value_field_fullname protoreflect.FullName = "google.protobuf.DoubleValue.value" +) + +// Field numbers for google.protobuf.DoubleValue. +const ( + DoubleValue_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.FloatValue. +const ( + FloatValue_message_name protoreflect.Name = "FloatValue" + FloatValue_message_fullname protoreflect.FullName = "google.protobuf.FloatValue" +) + +// Field names for google.protobuf.FloatValue. +const ( + FloatValue_Value_field_name protoreflect.Name = "value" + + FloatValue_Value_field_fullname protoreflect.FullName = "google.protobuf.FloatValue.value" +) + +// Field numbers for google.protobuf.FloatValue. +const ( + FloatValue_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.Int64Value. +const ( + Int64Value_message_name protoreflect.Name = "Int64Value" + Int64Value_message_fullname protoreflect.FullName = "google.protobuf.Int64Value" +) + +// Field names for google.protobuf.Int64Value. +const ( + Int64Value_Value_field_name protoreflect.Name = "value" + + Int64Value_Value_field_fullname protoreflect.FullName = "google.protobuf.Int64Value.value" +) + +// Field numbers for google.protobuf.Int64Value. +const ( + Int64Value_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.UInt64Value. +const ( + UInt64Value_message_name protoreflect.Name = "UInt64Value" + UInt64Value_message_fullname protoreflect.FullName = "google.protobuf.UInt64Value" +) + +// Field names for google.protobuf.UInt64Value. +const ( + UInt64Value_Value_field_name protoreflect.Name = "value" + + UInt64Value_Value_field_fullname protoreflect.FullName = "google.protobuf.UInt64Value.value" +) + +// Field numbers for google.protobuf.UInt64Value. +const ( + UInt64Value_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.Int32Value. +const ( + Int32Value_message_name protoreflect.Name = "Int32Value" + Int32Value_message_fullname protoreflect.FullName = "google.protobuf.Int32Value" +) + +// Field names for google.protobuf.Int32Value. +const ( + Int32Value_Value_field_name protoreflect.Name = "value" + + Int32Value_Value_field_fullname protoreflect.FullName = "google.protobuf.Int32Value.value" +) + +// Field numbers for google.protobuf.Int32Value. +const ( + Int32Value_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.UInt32Value. +const ( + UInt32Value_message_name protoreflect.Name = "UInt32Value" + UInt32Value_message_fullname protoreflect.FullName = "google.protobuf.UInt32Value" +) + +// Field names for google.protobuf.UInt32Value. +const ( + UInt32Value_Value_field_name protoreflect.Name = "value" + + UInt32Value_Value_field_fullname protoreflect.FullName = "google.protobuf.UInt32Value.value" +) + +// Field numbers for google.protobuf.UInt32Value. +const ( + UInt32Value_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.BoolValue. +const ( + BoolValue_message_name protoreflect.Name = "BoolValue" + BoolValue_message_fullname protoreflect.FullName = "google.protobuf.BoolValue" +) + +// Field names for google.protobuf.BoolValue. 
+const ( + BoolValue_Value_field_name protoreflect.Name = "value" + + BoolValue_Value_field_fullname protoreflect.FullName = "google.protobuf.BoolValue.value" +) + +// Field numbers for google.protobuf.BoolValue. +const ( + BoolValue_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.StringValue. +const ( + StringValue_message_name protoreflect.Name = "StringValue" + StringValue_message_fullname protoreflect.FullName = "google.protobuf.StringValue" +) + +// Field names for google.protobuf.StringValue. +const ( + StringValue_Value_field_name protoreflect.Name = "value" + + StringValue_Value_field_fullname protoreflect.FullName = "google.protobuf.StringValue.value" +) + +// Field numbers for google.protobuf.StringValue. +const ( + StringValue_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.BytesValue. +const ( + BytesValue_message_name protoreflect.Name = "BytesValue" + BytesValue_message_fullname protoreflect.FullName = "google.protobuf.BytesValue" +) + +// Field names for google.protobuf.BytesValue. +const ( + BytesValue_Value_field_name protoreflect.Name = "value" + + BytesValue_Value_field_fullname protoreflect.FullName = "google.protobuf.BytesValue.value" +) + +// Field numbers for google.protobuf.BytesValue. +const ( + BytesValue_Value_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/genname/name.go b/vendor/google.golang.org/protobuf/internal/genname/name.go deleted file mode 100644 index f45509fbdf3..00000000000 --- a/vendor/google.golang.org/protobuf/internal/genname/name.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package genname contains constants for generated names. -package genname - -const ( - State = "state" - - SizeCache = "sizeCache" - SizeCacheA = "XXX_sizecache" - - WeakFields = "weakFields" - WeakFieldsA = "XXX_weak" - - UnknownFields = "unknownFields" - UnknownFieldsA = "XXX_unrecognized" - - ExtensionFields = "extensionFields" - ExtensionFieldsA = "XXX_InternalExtensions" - ExtensionFieldsB = "XXX_extensions" - - WeakFieldPrefix = "XXX_weak_" -) diff --git a/vendor/google.golang.org/protobuf/internal/impl/api_export.go b/vendor/google.golang.org/protobuf/internal/impl/api_export.go index 4d22c960429..b5974528db6 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/api_export.go +++ b/vendor/google.golang.org/protobuf/internal/impl/api_export.go @@ -10,6 +10,7 @@ import ( "strconv" "google.golang.org/protobuf/encoding/prototext" + "google.golang.org/protobuf/internal/errors" "google.golang.org/protobuf/proto" pref "google.golang.org/protobuf/reflect/protoreflect" piface "google.golang.org/protobuf/runtime/protoiface" @@ -19,6 +20,12 @@ import ( // functions that we do not want to appear in godoc. type Export struct{} +// NewError formats a string according to the format specifier and arguments and +// returns an error that has a "proto" prefix. +func (Export) NewError(f string, x ...interface{}) error { + return errors.New(f, x...) +} + // enum is any enum type generated by protoc-gen-go // and must be a named int32 type. 
type enum = interface{} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_map.go b/vendor/google.golang.org/protobuf/internal/impl/codec_map.go index 35a67c25bfc..44885a761f6 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/codec_map.go +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_map.go @@ -10,6 +10,7 @@ import ( "sort" "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/genid" pref "google.golang.org/protobuf/reflect/protoreflect" ) @@ -134,7 +135,7 @@ func consumeMap(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi *mapInfo b = b[n:] err := errUnknown switch num { - case 1: + case genid.MapEntry_Key_field_number: var v pref.Value var o unmarshalOutput v, o, err = mapi.keyFuncs.unmarshal(b, key, num, wtyp, opts) @@ -143,7 +144,7 @@ func consumeMap(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi *mapInfo } key = v n = o.n - case 2: + case genid.MapEntry_Value_field_number: var v pref.Value var o unmarshalOutput v, o, err = mapi.valFuncs.unmarshal(b, val, num, wtyp, opts) diff --git a/vendor/google.golang.org/protobuf/internal/impl/message.go b/vendor/google.golang.org/protobuf/internal/impl/message.go index 7dd994bd95d..c026a98180d 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/message.go +++ b/vendor/google.golang.org/protobuf/internal/impl/message.go @@ -12,7 +12,7 @@ import ( "sync" "sync/atomic" - "google.golang.org/protobuf/internal/genname" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/reflect/protoreflect" pref "google.golang.org/protobuf/reflect/protoreflect" ) @@ -148,19 +148,19 @@ func (mi *MessageInfo) makeStructInfo(t reflect.Type) structInfo { fieldLoop: for i := 0; i < t.NumField(); i++ { switch f := t.Field(i); f.Name { - case genname.SizeCache, genname.SizeCacheA: + case genid.SizeCache_goname, genid.SizeCacheA_goname: if f.Type == sizecacheType { si.sizecacheOffset = offsetOf(f, mi.Exporter) } - case genname.WeakFields, genname.WeakFieldsA: + case genid.WeakFields_goname, genid.WeakFieldsA_goname: if f.Type == weakFieldsType { si.weakOffset = offsetOf(f, mi.Exporter) } - case genname.UnknownFields, genname.UnknownFieldsA: + case genid.UnknownFields_goname, genid.UnknownFieldsA_goname: if f.Type == unknownFieldsType { si.unknownOffset = offsetOf(f, mi.Exporter) } - case genname.ExtensionFields, genname.ExtensionFieldsA, genname.ExtensionFieldsB: + case genid.ExtensionFields_goname, genid.ExtensionFieldsA_goname, genid.ExtensionFieldsB_goname: if f.Type == extensionFieldsType { si.extensionOffset = offsetOf(f, mi.Exporter) } diff --git a/vendor/google.golang.org/protobuf/internal/impl/validate.go b/vendor/google.golang.org/protobuf/internal/impl/validate.go index 57de9cc85b1..08cfb6054b4 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/validate.go +++ b/vendor/google.golang.org/protobuf/internal/impl/validate.go @@ -14,6 +14,7 @@ import ( "google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/strs" pref "google.golang.org/protobuf/reflect/protoreflect" preg "google.golang.org/protobuf/reflect/protoregistry" @@ -282,9 +283,9 @@ State: switch { case st.typ == validationTypeMap: switch num { - case 1: + case genid.MapEntry_Key_field_number: vi.typ = st.keyType - case 2: + case genid.MapEntry_Value_field_number: vi.typ = st.valType vi.mi = st.mi 
vi.requiredBit = 1 diff --git a/vendor/google.golang.org/protobuf/internal/version/version.go b/vendor/google.golang.org/protobuf/internal/version/version.go index 6b3001c66af..72cf770b427 100644 --- a/vendor/google.golang.org/protobuf/internal/version/version.go +++ b/vendor/google.golang.org/protobuf/internal/version/version.go @@ -52,7 +52,7 @@ import ( // 10. Send out the CL for review and submit it. const ( Major = 1 - Minor = 24 + Minor = 25 Patch = 0 PreRelease = "" ) diff --git a/vendor/google.golang.org/protobuf/proto/decode.go b/vendor/google.golang.org/protobuf/proto/decode.go index 4974b16d544..42fc5195e87 100644 --- a/vendor/google.golang.org/protobuf/proto/decode.go +++ b/vendor/google.golang.org/protobuf/proto/decode.go @@ -9,6 +9,7 @@ import ( "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/errors" "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/reflect/protoregistry" @@ -220,13 +221,13 @@ func (o UnmarshalOptions) unmarshalMap(b []byte, wtyp protowire.Type, mapv proto b = b[n:] err = errUnknown switch num { - case 1: + case genid.MapEntry_Key_field_number: key, n, err = o.unmarshalScalar(b, wtyp, keyField) if err != nil { break } haveKey = true - case 2: + case genid.MapEntry_Value_field_number: var v protoreflect.Value v, n, err = o.unmarshalScalar(b, wtyp, valField) if err != nil { diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go index cf9641ab922..2d5fa9936ba 100644 --- a/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go @@ -12,6 +12,7 @@ import ( "google.golang.org/protobuf/internal/errors" "google.golang.org/protobuf/internal/filedesc" "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/strs" "google.golang.org/protobuf/reflect/protoreflect" @@ -348,9 +349,9 @@ func checkValidMap(fd protoreflect.FieldDescriptor) error { kf := md.Fields().Get(0) vf := md.Fields().Get(1) switch { - case kf.Name() != "key" || kf.Number() != 1 || kf.Cardinality() != protoreflect.Optional || kf.ContainingOneof() != nil || kf.HasDefault(): + case kf.Name() != genid.MapEntry_Key_field_name || kf.Number() != genid.MapEntry_Key_field_number || kf.Cardinality() != protoreflect.Optional || kf.ContainingOneof() != nil || kf.HasDefault(): return errors.New("invalid key field") - case vf.Name() != "value" || vf.Number() != 2 || vf.Cardinality() != protoreflect.Optional || vf.ContainingOneof() != nil || vf.HasDefault(): + case vf.Name() != genid.MapEntry_Value_field_name || vf.Number() != genid.MapEntry_Value_field_number || vf.Cardinality() != protoreflect.Optional || vf.ContainingOneof() != nil || vf.HasDefault(): return errors.New("invalid value field") } switch kf.Kind() { diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/proto.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/proto.go index b669a4e7619..dd85915bd4b 100644 --- a/vendor/google.golang.org/protobuf/reflect/protoreflect/proto.go +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/proto.go @@ -128,7 +128,6 @@ package protoreflect import ( "fmt" - "regexp" "strings" "google.golang.org/protobuf/encoding/protowire" @@ 
-408,19 +407,14 @@ type EnumRanges interface { doNotImplement } -var ( - regexName = regexp.MustCompile(`^[_a-zA-Z][_a-zA-Z0-9]*$`) - regexFullName = regexp.MustCompile(`^[_a-zA-Z][_a-zA-Z0-9]*(\.[_a-zA-Z][_a-zA-Z0-9]*)*$`) -) - // Name is the short name for a proto declaration. This is not the name // as used in Go source code, which might not be identical to the proto name. type Name string // e.g., "Kind" -// IsValid reports whether n is a syntactically valid name. +// IsValid reports whether s is a syntactically valid name. // An empty name is invalid. -func (n Name) IsValid() bool { - return regexName.MatchString(string(n)) +func (s Name) IsValid() bool { + return consumeIdent(string(s)) == len(s) } // Names represent a list of names. @@ -443,10 +437,42 @@ type Names interface { // This should not have any leading or trailing dots. type FullName string // e.g., "google.protobuf.Field.Kind" -// IsValid reports whether n is a syntactically valid full name. +// IsValid reports whether s is a syntactically valid full name. // An empty full name is invalid. -func (n FullName) IsValid() bool { - return regexFullName.MatchString(string(n)) +func (s FullName) IsValid() bool { + i := consumeIdent(string(s)) + if i < 0 { + return false + } + for len(s) > i { + if s[i] != '.' { + return false + } + i++ + n := consumeIdent(string(s[i:])) + if n < 0 { + return false + } + i += n + } + return true +} + +func consumeIdent(s string) (i int) { + if len(s) == 0 || !isLetter(s[i]) { + return -1 + } + i++ + for len(s) > i && isLetterDigit(s[i]) { + i++ + } + return i +} +func isLetter(c byte) bool { + return c == '_' || ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') +} +func isLetterDigit(c byte) bool { + return isLetter(c) || ('0' <= c && c <= '9') } // Name returns the short name, which is the last identifier segment. diff --git a/vendor/google.golang.org/protobuf/types/dynamicpb/dynamic.go b/vendor/google.golang.org/protobuf/types/dynamicpb/dynamic.go index 7046ef20885..7db6e55987f 100644 --- a/vendor/google.golang.org/protobuf/types/dynamicpb/dynamic.go +++ b/vendor/google.golang.org/protobuf/types/dynamicpb/dynamic.go @@ -14,6 +14,39 @@ import ( "google.golang.org/protobuf/runtime/protoimpl" ) +// enum is a dynamic protoreflect.Enum. +type enum struct { + num pref.EnumNumber + typ pref.EnumType +} + +func (e enum) Descriptor() pref.EnumDescriptor { return e.typ.Descriptor() } +func (e enum) Type() pref.EnumType { return e.typ } +func (e enum) Number() pref.EnumNumber { return e.num } + +// enumType is a dynamic protoreflect.EnumType. +type enumType struct { + desc pref.EnumDescriptor +} + +// NewEnumType creates a new EnumType with the provided descriptor. +// +// EnumTypes created by this package are equal if their descriptors are equal. +// That is, if ed1 == ed2, then NewEnumType(ed1) == NewEnumType(ed2). +// +// Enum values created by the EnumType are equal if their numbers are equal. +func NewEnumType(desc pref.EnumDescriptor) pref.EnumType { + return enumType{desc} +} + +func (et enumType) New(n pref.EnumNumber) pref.Enum { return enum{n, et} } +func (et enumType) Descriptor() pref.EnumDescriptor { return et.desc } + +// extensionType is a dynamic protoreflect.ExtensionType. +type extensionType struct { + desc extensionTypeDescriptor +} + // A Message is a dynamically constructed protocol buffer message. 
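[Editorial aside, not part of the patch: the hunk above replaces the regexp-based Name.IsValid and FullName.IsValid with a hand-rolled consumeIdent parser. The observable behavior of the public protoreflect API is unchanged; the following usage sketch only illustrates that behavior.]

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
)

func main() {
	// Valid identifiers: letters, digits, underscores; segments joined by dots.
	fmt.Println(protoreflect.FullName("google.protobuf.Duration").IsValid()) // true
	fmt.Println(protoreflect.Name("Kind").IsValid())                         // true

	// Invalid: empty names, empty segments, leading digits.
	fmt.Println(protoreflect.FullName("").IsValid())                 // false
	fmt.Println(protoreflect.FullName("google..protobuf").IsValid()) // false
	fmt.Println(protoreflect.Name("1foo").IsValid())                 // false
}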
// // Message implements the proto.Message interface, and may be used with all @@ -577,11 +610,6 @@ func newListEntry(fd pref.FieldDescriptor) pref.Value { panic(errors.New("%v: unknown kind %v", fd.FullName(), fd.Kind())) } -// extensionType is a dynamic protoreflect.ExtensionType. -type extensionType struct { - desc extensionTypeDescriptor -} - // NewExtensionType creates a new ExtensionType with the provided descriptor. // // Dynamic ExtensionTypes with the same descriptor compare as equal. That is, diff --git a/vendor/google.golang.org/protobuf/types/known/anypb/any.pb.go b/vendor/google.golang.org/protobuf/types/known/anypb/any.pb.go index 5f9498e4e44..82a473e2652 100644 --- a/vendor/google.golang.org/protobuf/types/known/anypb/any.pb.go +++ b/vendor/google.golang.org/protobuf/types/known/anypb/any.pb.go @@ -31,12 +31,100 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: google/protobuf/any.proto +// Package anypb contains generated types for google/protobuf/any.proto. +// +// The Any message is a dynamic representation of any other message value. +// It is functionally a tuple of the full name of the remote message type and +// the serialized bytes of the remote message value. +// +// +// Constructing an Any +// +// An Any message containing another message value is constructed using New: +// +// any, err := anypb.New(m) +// if err != nil { +// ... // handle error +// } +// ... // make use of any +// +// +// Unmarshaling an Any +// +// With a populated Any message, the underlying message can be serialized into +// a remote concrete message value in a few ways. +// +// If the exact concrete type is known, then a new (or pre-existing) instance +// of that message can be passed to the UnmarshalTo method: +// +// m := new(foopb.MyMessage) +// if err := any.UnmarshalTo(m); err != nil { +// ... // handle error +// } +// ... // make use of m +// +// If the exact concrete type is not known, then the UnmarshalNew method can be +// used to unmarshal the contents into a new instance of the remote message type: +// +// m, err := any.UnmarshalNew() +// if err != nil { +// ... // handle error +// } +// ... // make use of m +// +// UnmarshalNew uses the global type registry to resolve the message type and +// construct a new instance of that message to unmarshal into. In order for a +// message type to appear in the global registry, the Go type representing that +// protobuf message type must be linked into the Go binary. For messages +// generated by protoc-gen-go, this is achieved through an import of the +// generated Go package representing a .proto file. +// +// A common pattern with UnmarshalNew is to use a type switch with the resulting +// proto.Message value: +// +// switch m := m.(type) { +// case *foopb.MyMessage: +// ... // make use of m as a *foopb.MyMessage +// case *barpb.OtherMessage: +// ... // make use of m as a *barpb.OtherMessage +// case *bazpb.SomeMessage: +// ... // make use of m as a *bazpb.SomeMessage +// } +// +// This pattern ensures that the generated packages containing the message types +// listed in the case clauses are linked into the Go binary and therefore also +// registered in the global registry. +// +// +// Type checking an Any +// +// In order to type check whether an Any message represents some other message, +// then use the MessageIs method: +// +// if any.MessageIs((*foopb.MyMessage)(nil)) { +// ... 
// make use of any, knowing that it contains a foopb.MyMessage +// } +// +// The MessageIs method can also be used with an allocated instance of the target +// message type if the intention is to unmarshal into it if the type matches: +// +// m := new(foopb.MyMessage) +// if any.MessageIs(m) { +// if err := any.UnmarshalTo(m); err != nil { +// ... // handle error +// } +// ... // make use of m +// } +// package anypb import ( + proto "google.golang.org/protobuf/proto" protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoregistry "google.golang.org/protobuf/reflect/protoregistry" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" + strings "strings" sync "sync" ) @@ -158,6 +246,125 @@ type Any struct { Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` } +// New marshals src into a new Any instance. +func New(src proto.Message) (*Any, error) { + dst := new(Any) + if err := dst.MarshalFrom(src); err != nil { + return nil, err + } + return dst, nil +} + +// MarshalFrom marshals src into dst as the underlying message +// using the provided marshal options. +// +// If no options are specified, call dst.MarshalFrom instead. +func MarshalFrom(dst *Any, src proto.Message, opts proto.MarshalOptions) error { + const urlPrefix = "type.googleapis.com/" + if src == nil { + return protoimpl.X.NewError("invalid nil source message") + } + b, err := opts.Marshal(src) + if err != nil { + return err + } + dst.TypeUrl = urlPrefix + string(src.ProtoReflect().Descriptor().FullName()) + dst.Value = b + return nil +} + +// UnmarshalTo unmarshals the underlying message from src into dst +// using the provided unmarshal options. +// It reports an error if dst is not of the right message type. +// +// If no options are specified, call src.UnmarshalTo instead. +func UnmarshalTo(src *Any, dst proto.Message, opts proto.UnmarshalOptions) error { + if src == nil { + return protoimpl.X.NewError("invalid nil source message") + } + if !src.MessageIs(dst) { + got := dst.ProtoReflect().Descriptor().FullName() + want := src.MessageName() + return protoimpl.X.NewError("mismatched message type: got %q, want %q", got, want) + } + return opts.Unmarshal(src.GetValue(), dst) +} + +// UnmarshalNew unmarshals the underlying message from src into dst, +// which is newly created message using a type resolved from the type URL. +// The message type is resolved according to opt.Resolver, +// which should implement protoregistry.MessageTypeResolver. +// It reports an error if the underlying message type could not be resolved. +// +// If no options are specified, call src.UnmarshalNew instead. +func UnmarshalNew(src *Any, opts proto.UnmarshalOptions) (dst proto.Message, err error) { + if src.GetTypeUrl() == "" { + return nil, protoimpl.X.NewError("invalid empty type URL") + } + if opts.Resolver == nil { + opts.Resolver = protoregistry.GlobalTypes + } + r, ok := opts.Resolver.(protoregistry.MessageTypeResolver) + if !ok { + return nil, protoregistry.NotFound + } + mt, err := r.FindMessageByURL(src.GetTypeUrl()) + if err != nil { + if err == protoregistry.NotFound { + return nil, err + } + return nil, protoimpl.X.NewError("could not resolve %q: %v", src.GetTypeUrl(), err) + } + dst = mt.New().Interface() + return dst, opts.Unmarshal(src.GetValue(), dst) +} + +// MessageIs reports whether the underlying message is of the same type as m. 
+func (x *Any) MessageIs(m proto.Message) bool { + if m == nil { + return false + } + url := x.GetTypeUrl() + name := string(m.ProtoReflect().Descriptor().FullName()) + if !strings.HasSuffix(url, name) { + return false + } + return len(url) == len(name) || url[len(url)-len(name)-1] == '/' +} + +// MessageName reports the full name of the underlying message, +// returning an empty string if invalid. +func (x *Any) MessageName() protoreflect.FullName { + url := x.GetTypeUrl() + name := protoreflect.FullName(url) + if i := strings.LastIndexByte(url, '/'); i >= 0 { + name = name[i+len("/"):] + } + if !name.IsValid() { + return "" + } + return name +} + +// MarshalFrom marshals m into x as the underlying message. +func (x *Any) MarshalFrom(m proto.Message) error { + return MarshalFrom(x, m, proto.MarshalOptions{}) +} + +// UnmarshalTo unmarshals the contents of the underlying message of x into m. +// It resets m before performing the unmarshal operation. +// It reports an error if m is not of the right message type. +func (x *Any) UnmarshalTo(m proto.Message) error { + return UnmarshalTo(x, m, proto.UnmarshalOptions{}) +} + +// UnmarshalNew unmarshals the contents of the underlying message of x into +// a newly allocated message of the specified type. +// It reports an error if the underlying message type could not be resolved. +func (x *Any) UnmarshalNew() (proto.Message, error) { + return UnmarshalNew(x, proto.UnmarshalOptions{}) +} + func (x *Any) Reset() { *x = Any{} if protoimpl.UnsafeEnabled { diff --git a/vendor/google.golang.org/protobuf/types/known/durationpb/duration.pb.go b/vendor/google.golang.org/protobuf/types/known/durationpb/duration.pb.go index 3997c604f41..f7a11099404 100644 --- a/vendor/google.golang.org/protobuf/types/known/durationpb/duration.pb.go +++ b/vendor/google.golang.org/protobuf/types/known/durationpb/duration.pb.go @@ -31,13 +31,58 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: google/protobuf/duration.proto +// Package durationpb contains generated types for google/protobuf/duration.proto. +// +// The Duration message represents a signed span of time. +// +// +// Conversion to a Go Duration +// +// The AsDuration method can be used to convert a Duration message to a +// standard Go time.Duration value: +// +// d := dur.AsDuration() +// ... // make use of d as a time.Duration +// +// Converting to a time.Duration is a common operation so that the extensive +// set of time-based operations provided by the time package can be leveraged. +// See https://golang.org/pkg/time for more information. +// +// The AsDuration method performs the conversion on a best-effort basis. +// Durations with denormal values (e.g., nanoseconds beyond -99999999 and +// +99999999, inclusive; or seconds and nanoseconds with opposite signs) +// are normalized during the conversion to a time.Duration. To manually check for +// invalid Duration per the documented limitations in duration.proto, +// additionally call the CheckValid method: +// +// if err := dur.CheckValid(); err != nil { +// ... // handle error +// } +// +// Note that the documented limitations in duration.proto does not protect a +// Duration from overflowing the representable range of a time.Duration in Go. +// The AsDuration method uses saturation arithmetic such that an overflow clamps +// the resulting value to the closest representable value (e.g., math.MaxInt64 +// for positive overflow and math.MinInt64 for negative overflow). 
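As a rough sanity check of the anypb helpers added above (New, MessageIs, MessageName, UnmarshalTo, UnmarshalNew), the sketch below packs and unpacks a message; durationpb is used only because it is a convenient concrete message type that is already part of this update:

    package main

    import (
    	"fmt"
    	"log"
    	"time"

    	"google.golang.org/protobuf/types/known/anypb"
    	"google.golang.org/protobuf/types/known/durationpb"
    )

    func main() {
    	// Pack a concrete message; the type URL records its full message name.
    	src := durationpb.New(90 * time.Second)
    	a, err := anypb.New(src)
    	if err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(a.MessageIs((*durationpb.Duration)(nil))) // true
    	fmt.Println(a.MessageName())                          // google.protobuf.Duration

    	// Unpack into a known concrete type.
    	dst := new(durationpb.Duration)
    	if err := a.UnmarshalTo(dst); err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(dst.AsDuration()) // 1m30s

    	// Or let the global registry resolve the type; this works here because
    	// the durationpb import links the message type into the binary.
    	m, err := a.UnmarshalNew()
    	if err != nil {
    		log.Fatal(err)
    	}
    	fmt.Printf("%T\n", m) // *durationpb.Duration
    }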
+// +// +// Conversion from a Go Duration +// +// The durationpb.New function can be used to construct a Duration message +// from a standard Go time.Duration value: +// +// dur := durationpb.New(d) +// ... // make use of d as a *durationpb.Duration +// package durationpb import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" + math "math" reflect "reflect" sync "sync" + time "time" ) // A Duration represents a signed, fixed-length span of time represented @@ -118,6 +163,91 @@ type Duration struct { Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` } +// New constructs a new Duration from the provided time.Duration. +func New(d time.Duration) *Duration { + nanos := d.Nanoseconds() + secs := nanos / 1e9 + nanos -= secs * 1e9 + return &Duration{Seconds: int64(secs), Nanos: int32(nanos)} +} + +// AsDuration converts x to a time.Duration, +// returning the closest duration value in the event of overflow. +func (x *Duration) AsDuration() time.Duration { + secs := x.GetSeconds() + nanos := x.GetNanos() + d := time.Duration(secs) * time.Second + overflow := d/time.Second != time.Duration(secs) + d += time.Duration(nanos) * time.Nanosecond + overflow = overflow || (secs < 0 && nanos < 0 && d > 0) + overflow = overflow || (secs > 0 && nanos > 0 && d < 0) + if overflow { + switch { + case secs < 0: + return time.Duration(math.MinInt64) + case secs > 0: + return time.Duration(math.MaxInt64) + } + } + return d +} + +// IsValid reports whether the duration is valid. +// It is equivalent to CheckValid == nil. +func (x *Duration) IsValid() bool { + return x.check() == 0 +} + +// CheckValid returns an error if the duration is invalid. +// In particular, it checks whether the value is within the range of +// -10000 years to +10000 years inclusive. +// An error is reported for a nil Duration. 
+func (x *Duration) CheckValid() error { + switch x.check() { + case invalidNil: + return protoimpl.X.NewError("invalid nil Duration") + case invalidUnderflow: + return protoimpl.X.NewError("duration (%v) exceeds -10000 years", x) + case invalidOverflow: + return protoimpl.X.NewError("duration (%v) exceeds +10000 years", x) + case invalidNanosRange: + return protoimpl.X.NewError("duration (%v) has out-of-range nanos", x) + case invalidNanosSign: + return protoimpl.X.NewError("duration (%v) has seconds and nanos with different signs", x) + default: + return nil + } +} + +const ( + _ = iota + invalidNil + invalidUnderflow + invalidOverflow + invalidNanosRange + invalidNanosSign +) + +func (x *Duration) check() uint { + const absDuration = 315576000000 // 10000yr * 365.25day/yr * 24hr/day * 60min/hr * 60sec/min + secs := x.GetSeconds() + nanos := x.GetNanos() + switch { + case x == nil: + return invalidNil + case secs < -absDuration: + return invalidUnderflow + case secs > +absDuration: + return invalidOverflow + case nanos <= -1e9 || nanos >= +1e9: + return invalidNanosRange + case (secs > 0 && nanos < 0) || (secs < 0 && nanos > 0): + return invalidNanosSign + default: + return 0 + } +} + func (x *Duration) Reset() { *x = Duration{} if protoimpl.UnsafeEnabled { diff --git a/vendor/google.golang.org/protobuf/types/known/fieldmaskpb/field_mask.pb.go b/vendor/google.golang.org/protobuf/types/known/fieldmaskpb/field_mask.pb.go index 2ccf95abd04..6a8d872c085 100644 --- a/vendor/google.golang.org/protobuf/types/known/fieldmaskpb/field_mask.pb.go +++ b/vendor/google.golang.org/protobuf/types/known/fieldmaskpb/field_mask.pb.go @@ -31,12 +31,59 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: google/protobuf/field_mask.proto +// Package fieldmaskpb contains generated types for google/protobuf/field_mask.proto. +// +// The FieldMask message represents a set of symbolic field paths. +// The paths are specific to some target message type, +// which is not stored within the FieldMask message itself. +// +// +// Constructing a FieldMask +// +// The New function is used construct a FieldMask: +// +// var messageType *descriptorpb.DescriptorProto +// fm, err := fieldmaskpb.New(messageType, "field.name", "field.number") +// if err != nil { +// ... // handle error +// } +// ... // make use of fm +// +// The "field.name" and "field.number" paths are valid paths according to the +// google.protobuf.DescriptorProto message. Use of a path that does not correlate +// to valid fields reachable from DescriptorProto would result in an error. +// +// Once a FieldMask message has been constructed, +// the Append method can be used to insert additional paths to the path set: +// +// var messageType *descriptorpb.DescriptorProto +// if err := fm.Append(messageType, "options"); err != nil { +// ... // handle error +// } +// +// +// Type checking a FieldMask +// +// In order to verify that a FieldMask represents a set of fields that are +// reachable from some target message type, use the IsValid method: +// +// var messageType *descriptorpb.DescriptorProto +// if fm.IsValid(messageType) { +// ... // make use of fm +// } +// +// IsValid needs to be passed the target message type as an input since the +// FieldMask message itself does not store the message type that the set of paths +// are for. 
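The Duration helpers above are easiest to see with a couple of concrete values; a small illustrative sketch (the overflow value is arbitrary) might look like:

    package main

    import (
    	"fmt"
    	"math"
    	"time"

    	"google.golang.org/protobuf/types/known/durationpb"
    )

    func main() {
    	// Round-trip between time.Duration and the Duration message.
    	d := durationpb.New(1500 * time.Millisecond)
    	fmt.Println(d.GetSeconds(), d.GetNanos()) // 1 500000000
    	fmt.Println(d.AsDuration())               // 1.5s

    	// AsDuration saturates rather than wrapping on overflow.
    	huge := &durationpb.Duration{Seconds: 1 << 62}
    	fmt.Println(huge.AsDuration() == time.Duration(math.MaxInt64)) // true

    	// CheckValid flags denormal values such as mismatched signs.
    	bad := &durationpb.Duration{Seconds: 1, Nanos: -1}
    	fmt.Println(bad.IsValid())    // false
    	fmt.Println(bad.CheckValid()) // non-nil error
    }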
package fieldmaskpb import ( + proto "google.golang.org/protobuf/proto" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" + sort "sort" + strings "strings" sync "sync" ) @@ -248,6 +295,176 @@ type FieldMask struct { Paths []string `protobuf:"bytes,1,rep,name=paths,proto3" json:"paths,omitempty"` } +// New constructs a field mask from a list of paths and verifies that +// each one is valid according to the specified message type. +func New(m proto.Message, paths ...string) (*FieldMask, error) { + x := new(FieldMask) + return x, x.Append(m, paths...) +} + +// Union returns the union of all the paths in the input field masks. +func Union(mx *FieldMask, my *FieldMask, ms ...*FieldMask) *FieldMask { + var out []string + out = append(out, mx.GetPaths()...) + out = append(out, my.GetPaths()...) + for _, m := range ms { + out = append(out, m.GetPaths()...) + } + return &FieldMask{Paths: normalizePaths(out)} +} + +// Intersect returns the intersection of all the paths in the input field masks. +func Intersect(mx *FieldMask, my *FieldMask, ms ...*FieldMask) *FieldMask { + var ss1, ss2 []string // reused buffers for performance + intersect := func(out, in []string) []string { + ss1 = normalizePaths(append(ss1[:0], in...)) + ss2 = normalizePaths(append(ss2[:0], out...)) + out = out[:0] + for i1, i2 := 0, 0; i1 < len(ss1) && i2 < len(ss2); { + switch s1, s2 := ss1[i1], ss2[i2]; { + case hasPathPrefix(s1, s2): + out = append(out, s1) + i1++ + case hasPathPrefix(s2, s1): + out = append(out, s2) + i2++ + case lessPath(s1, s2): + i1++ + case lessPath(s2, s1): + i2++ + } + } + return out + } + + out := Union(mx, my, ms...).GetPaths() + out = intersect(out, mx.GetPaths()) + out = intersect(out, my.GetPaths()) + for _, m := range ms { + out = intersect(out, m.GetPaths()) + } + return &FieldMask{Paths: normalizePaths(out)} +} + +// IsValid reports whether all the paths are syntactically valid and +// refer to known fields in the specified message type. +// It reports false for a nil FieldMask. +func (x *FieldMask) IsValid(m proto.Message) bool { + paths := x.GetPaths() + return x != nil && numValidPaths(m, paths) == len(paths) +} + +// Append appends a list of paths to the mask and verifies that each one +// is valid according to the specified message type. +// An invalid path is not appended and breaks insertion of subsequent paths. +func (x *FieldMask) Append(m proto.Message, paths ...string) error { + numValid := numValidPaths(m, paths) + x.Paths = append(x.Paths, paths[:numValid]...) + paths = paths[numValid:] + if len(paths) > 0 { + name := m.ProtoReflect().Descriptor().FullName() + return protoimpl.X.NewError("invalid path %q for message %q", paths[0], name) + } + return nil +} + +func numValidPaths(m proto.Message, paths []string) int { + md0 := m.ProtoReflect().Descriptor() + for i, path := range paths { + md := md0 + if !rangeFields(path, func(field string) bool { + // Search the field within the message. + if md == nil { + return false // not within a message + } + fd := md.Fields().ByName(protoreflect.Name(field)) + // The real field name of a group is the message name. 
+ if fd == nil { + gd := md.Fields().ByName(protoreflect.Name(strings.ToLower(field))) + if gd != nil && gd.Kind() == protoreflect.GroupKind && string(gd.Message().Name()) == field { + fd = gd + } + } else if fd.Kind() == protoreflect.GroupKind && string(fd.Message().Name()) != field { + fd = nil + } + if fd == nil { + return false // message has does not have this field + } + + // Identify the next message to search within. + md = fd.Message() // may be nil + if fd.IsMap() { + md = fd.MapValue().Message() // may be nil + } + return true + }) { + return i + } + } + return len(paths) +} + +// Normalize converts the mask to its canonical form where all paths are sorted +// and redundant paths are removed. +func (x *FieldMask) Normalize() { + x.Paths = normalizePaths(x.Paths) +} + +func normalizePaths(paths []string) []string { + sort.Slice(paths, func(i, j int) bool { + return lessPath(paths[i], paths[j]) + }) + + // Elide any path that is a prefix match on the previous. + out := paths[:0] + for _, path := range paths { + if len(out) > 0 && hasPathPrefix(path, out[len(out)-1]) { + continue + } + out = append(out, path) + } + return out +} + +// hasPathPrefix is like strings.HasPrefix, but further checks for either +// an exact matche or that the prefix is delimited by a dot. +func hasPathPrefix(path, prefix string) bool { + return strings.HasPrefix(path, prefix) && (len(path) == len(prefix) || path[len(prefix)] == '.') +} + +// lessPath is a lexicographical comparison where dot is specially treated +// as the smallest symbol. +func lessPath(x, y string) bool { + for i := 0; i < len(x) && i < len(y); i++ { + if x[i] != y[i] { + return (x[i] - '.') < (y[i] - '.') + } + } + return len(x) < len(y) +} + +// rangeFields is like strings.Split(path, "."), but avoids allocations by +// iterating over each field in place and calling a iterator function. +func rangeFields(path string, f func(field string) bool) bool { + for { + var field string + if i := strings.IndexByte(path, '.'); i >= 0 { + field, path = path[:i], path[i:] + } else { + field, path = path, "" + } + + if !f(field) { + return false + } + + if len(path) == 0 { + return true + } + path = strings.TrimPrefix(path, ".") + } +} + func (x *FieldMask) Reset() { *x = FieldMask{} if protoimpl.UnsafeEnabled { diff --git a/vendor/google.golang.org/protobuf/types/known/structpb/struct.pb.go b/vendor/google.golang.org/protobuf/types/known/structpb/struct.pb.go index df098137b63..7433a4c41c1 100644 --- a/vendor/google.golang.org/protobuf/types/known/structpb/struct.pb.go +++ b/vendor/google.golang.org/protobuf/types/known/structpb/struct.pb.go @@ -31,13 +31,105 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: google/protobuf/struct.proto +// Package structpb contains generated types for google/protobuf/struct.proto. +// +// The messages (i.e., Value, Struct, and ListValue) defined in struct.proto are +// used to represent arbitrary JSON. The Value message represents a JSON value, +// the Struct message represents a JSON object, and the ListValue message +// represents a JSON array. See https://json.org for more information. +// +// The Value, Struct, and ListValue types have generated MarshalJSON and +// UnmarshalJSON methods such that they serialize JSON equivalent to what the +// messages themselves represent. Use of these types with the +// "google.golang.org/protobuf/encoding/protojson" package +// ensures that they will be serialized as their JSON equivalent. 
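To exercise the FieldMask helpers above (New, Append, IsValid, Union, Intersect), the sketch below validates paths against descriptorpb.DescriptorProto, the same target type used in the package documentation; the chosen paths are illustrative:

    package main

    import (
    	"fmt"
    	"log"

    	"google.golang.org/protobuf/types/descriptorpb"
    	"google.golang.org/protobuf/types/known/fieldmaskpb"
    )

    func main() {
    	// A typed nil pointer is enough: only the message descriptor is used.
    	var msg *descriptorpb.DescriptorProto

    	// Paths are validated against the target message type on construction.
    	fm, err := fieldmaskpb.New(msg, "name", "field.name")
    	if err != nil {
    		log.Fatal(err)
    	}
    	if err := fm.Append(msg, "field.number", "options"); err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(fm.IsValid(msg)) // true
    	fmt.Println(fm.GetPaths())   // [name field.name field.number options]

    	// Union and Intersect merge masks; redundant sub-paths are elided.
    	other, _ := fieldmaskpb.New(msg, "field")
    	fmt.Println(fieldmaskpb.Union(fm, other).GetPaths())     // [field name options]
    	fmt.Println(fieldmaskpb.Intersect(fm, other).GetPaths()) // [field.name field.number]
    }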
+// +// +// Conversion to and from a Go interface +// +// The standard Go "encoding/json" package has functionality to serialize +// arbitrary types to a large degree. The Value.AsInterface, Struct.AsMap, and +// ListValue.AsSlice methods can convert the protobuf message representation into +// a form represented by interface{}, map[string]interface{}, and []interface{}. +// This form can be used with other packages that operate on such data structures +// and also directly with the standard json package. +// +// In order to convert the interface{}, map[string]interface{}, and []interface{} +// forms back as Value, Struct, and ListValue messages, use the NewStruct, +// NewList, and NewValue constructor functions. +// +// +// Example usage +// +// Consider the following example JSON object: +// +// { +// "firstName": "John", +// "lastName": "Smith", +// "isAlive": true, +// "age": 27, +// "address": { +// "streetAddress": "21 2nd Street", +// "city": "New York", +// "state": "NY", +// "postalCode": "10021-3100" +// }, +// "phoneNumbers": [ +// { +// "type": "home", +// "number": "212 555-1234" +// }, +// { +// "type": "office", +// "number": "646 555-4567" +// } +// ], +// "children": [], +// "spouse": null +// } +// +// To construct a Value message representing the above JSON object: +// +// m, err := structpb.NewValue(map[string]interface{}{ +// "firstName": "John", +// "lastName": "Smith", +// "isAlive": true, +// "age": 27, +// "address": map[string]interface{}{ +// "streetAddress": "21 2nd Street", +// "city": "New York", +// "state": "NY", +// "postalCode": "10021-3100", +// }, +// "phoneNumbers": []interface{}{ +// map[string]interface{}{ +// "type": "home", +// "number": "212 555-1234", +// }, +// map[string]interface{}{ +// "type": "office", +// "number": "646 555-4567", +// }, +// }, +// "children": []interface{}{}, +// "spouse": nil, +// }) +// if err != nil { +// ... // handle error +// } +// ... // make use of m as a *structpb.Value +// package structpb import ( + base64 "encoding/base64" + protojson "google.golang.org/protobuf/encoding/protojson" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" + math "math" reflect "reflect" sync "sync" + utf8 "unicode/utf8" ) // `NullValue` is a singleton enumeration to represent the null value for the @@ -105,6 +197,42 @@ type Struct struct { Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } +// NewStruct constructs a Struct from a general-purpose Go map. +// The map keys must be valid UTF-8. +// The map values are converted using NewValue. +func NewStruct(v map[string]interface{}) (*Struct, error) { + x := &Struct{Fields: make(map[string]*Value, len(v))} + for k, v := range v { + if !utf8.ValidString(k) { + return nil, protoimpl.X.NewError("invalid UTF-8 in string: %q", k) + } + var err error + x.Fields[k], err = NewValue(v) + if err != nil { + return nil, err + } + } + return x, nil +} + +// AsMap converts x to a general-purpose Go map. +// The map values are converted by calling Value.AsInterface. 
+func (x *Struct) AsMap() map[string]interface{} { + vs := make(map[string]interface{}) + for k, v := range x.GetFields() { + vs[k] = v.AsInterface() + } + return vs +} + +func (x *Struct) MarshalJSON() ([]byte, error) { + return protojson.Marshal(x) +} + +func (x *Struct) UnmarshalJSON(b []byte) error { + return protojson.Unmarshal(b, x) +} + func (x *Struct) Reset() { *x = Struct{} if protoimpl.UnsafeEnabled { @@ -167,6 +295,151 @@ type Value struct { Kind isValue_Kind `protobuf_oneof:"kind"` } +// NewValue constructs a Value from a general-purpose Go interface. +// +// ╔════════════════════════╤════════════════════════════════════════════╗ +// ║ Go type │ Conversion ║ +// ╠════════════════════════╪════════════════════════════════════════════╣ +// ║ nil │ stored as NullValue ║ +// ║ bool │ stored as BoolValue ║ +// ║ int, int32, int64 │ stored as NumberValue ║ +// ║ uint, uint32, uint64 │ stored as NumberValue ║ +// ║ float32, float64 │ stored as NumberValue ║ +// ║ string │ stored as StringValue; must be valid UTF-8 ║ +// ║ []byte │ stored as StringValue; base64-encoded ║ +// ║ map[string]interface{} │ stored as StructValue ║ +// ║ []interface{} │ stored as ListValue ║ +// ╚════════════════════════╧════════════════════════════════════════════╝ +// +// When converting an int64 or uint64 to a NumberValue, numeric precision loss +// is possible since they are stored as a float64. +func NewValue(v interface{}) (*Value, error) { + switch v := v.(type) { + case nil: + return NewNullValue(), nil + case bool: + return NewBoolValue(v), nil + case int: + return NewNumberValue(float64(v)), nil + case int32: + return NewNumberValue(float64(v)), nil + case int64: + return NewNumberValue(float64(v)), nil + case uint: + return NewNumberValue(float64(v)), nil + case uint32: + return NewNumberValue(float64(v)), nil + case uint64: + return NewNumberValue(float64(v)), nil + case float32: + return NewNumberValue(float64(v)), nil + case float64: + return NewNumberValue(float64(v)), nil + case string: + if !utf8.ValidString(v) { + return nil, protoimpl.X.NewError("invalid UTF-8 in string: %q", v) + } + return NewStringValue(v), nil + case []byte: + s := base64.StdEncoding.EncodeToString(v) + return NewStringValue(s), nil + case map[string]interface{}: + v2, err := NewStruct(v) + if err != nil { + return nil, err + } + return NewStructValue(v2), nil + case []interface{}: + v2, err := NewList(v) + if err != nil { + return nil, err + } + return NewListValue(v2), nil + default: + return nil, protoimpl.X.NewError("invalid type: %T", v) + } +} + +// NewNullValue constructs a new null Value. +func NewNullValue() *Value { + return &Value{Kind: &Value_NullValue{NullValue: NullValue_NULL_VALUE}} +} + +// NewBoolValue constructs a new boolean Value. +func NewBoolValue(v bool) *Value { + return &Value{Kind: &Value_BoolValue{BoolValue: v}} +} + +// NewNumberValue constructs a new number Value. +func NewNumberValue(v float64) *Value { + return &Value{Kind: &Value_NumberValue{NumberValue: v}} +} + +// NewStringValue constructs a new string Value. +func NewStringValue(v string) *Value { + return &Value{Kind: &Value_StringValue{StringValue: v}} +} + +// NewStructValue constructs a new struct Value. +func NewStructValue(v *Struct) *Value { + return &Value{Kind: &Value_StructValue{StructValue: v}} +} + +// NewListValue constructs a new list Value. +func NewListValue(v *ListValue) *Value { + return &Value{Kind: &Value_ListValue{ListValue: v}} +} + +// AsInterface converts x to a general-purpose Go interface. 
+// +// Calling Value.MarshalJSON and "encoding/json".Marshal on this output produce +// semantically equivalent JSON (assuming no errors occur). +// +// Floating-point values (i.e., "NaN", "Infinity", and "-Infinity") are +// converted as strings to remain compatible with MarshalJSON. +func (x *Value) AsInterface() interface{} { + switch v := x.GetKind().(type) { + case *Value_NumberValue: + if v != nil { + switch { + case math.IsNaN(v.NumberValue): + return "NaN" + case math.IsInf(v.NumberValue, +1): + return "Infinity" + case math.IsInf(v.NumberValue, -1): + return "-Infinity" + default: + return v.NumberValue + } + } + case *Value_StringValue: + if v != nil { + return v.StringValue + } + case *Value_BoolValue: + if v != nil { + return v.BoolValue + } + case *Value_StructValue: + if v != nil { + return v.StructValue.AsMap() + } + case *Value_ListValue: + if v != nil { + return v.ListValue.AsSlice() + } + } + return nil +} + +func (x *Value) MarshalJSON() ([]byte, error) { + return protojson.Marshal(x) +} + +func (x *Value) UnmarshalJSON(b []byte) error { + return protojson.Unmarshal(b, x) +} + func (x *Value) Reset() { *x = Value{} if protoimpl.UnsafeEnabled { @@ -306,6 +579,38 @@ type ListValue struct { Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` } +// NewList constructs a ListValue from a general-purpose Go slice. +// The slice elements are converted using NewValue. +func NewList(v []interface{}) (*ListValue, error) { + x := &ListValue{Values: make([]*Value, len(v))} + for i, v := range v { + var err error + x.Values[i], err = NewValue(v) + if err != nil { + return nil, err + } + } + return x, nil +} + +// AsSlice converts x to a general-purpose Go slice. +// The slice elements are converted by calling Value.AsInterface. +func (x *ListValue) AsSlice() []interface{} { + vs := make([]interface{}, len(x.GetValues())) + for i, v := range x.GetValues() { + vs[i] = v.AsInterface() + } + return vs +} + +func (x *ListValue) MarshalJSON() ([]byte, error) { + return protojson.Marshal(x) +} + +func (x *ListValue) UnmarshalJSON(b []byte) error { + return protojson.Unmarshal(b, x) +} + func (x *ListValue) Reset() { *x = ListValue{} if protoimpl.UnsafeEnabled { diff --git a/vendor/google.golang.org/protobuf/types/known/timestamppb/timestamp.pb.go b/vendor/google.golang.org/protobuf/types/known/timestamppb/timestamp.pb.go index 6fe6d42f17a..c25e4bd7d0d 100644 --- a/vendor/google.golang.org/protobuf/types/known/timestamppb/timestamp.pb.go +++ b/vendor/google.golang.org/protobuf/types/known/timestamppb/timestamp.pb.go @@ -31,6 +31,48 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // source: google/protobuf/timestamp.proto +// Package timestamppb contains generated types for google/protobuf/timestamp.proto. +// +// The Timestamp message represents a timestamp, +// an instant in time since the Unix epoch (January 1st, 1970). +// +// +// Conversion to a Go Time +// +// The AsTime method can be used to convert a Timestamp message to a +// standard Go time.Time value in UTC: +// +// t := ts.AsTime() +// ... // make use of t as a time.Time +// +// Converting to a time.Time is a common operation so that the extensive +// set of time-based operations provided by the time package can be leveraged. +// See https://golang.org/pkg/time for more information. +// +// The AsTime method performs the conversion on a best-effort basis. 
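A quick, hedged illustration of the structpb constructors and conversions described above (the keys and values are made up for the example):

    package main

    import (
    	"fmt"
    	"log"

    	"google.golang.org/protobuf/types/known/structpb"
    )

    func main() {
    	// Build a Value tree from ordinary Go data.
    	v, err := structpb.NewValue(map[string]interface{}{
    		"name":    "k6",
    		"retries": 3,
    		"tags":    []interface{}{"load", "grpc"},
    		"enabled": true,
    		"note":    nil,
    	})
    	if err != nil {
    		log.Fatal(err)
    	}

    	// The generated MarshalJSON produces the equivalent JSON document.
    	b, err := v.MarshalJSON()
    	if err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(string(b))

    	// AsInterface converts back to plain Go types (numbers become float64).
    	m := v.AsInterface().(map[string]interface{})
    	fmt.Println(m["retries"]) // 3
    }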
Timestamps +// with denormal values (e.g., nanoseconds beyond 0 and 99999999, inclusive) +// are normalized during the conversion to a time.Time. To manually check for +// invalid Timestamps per the documented limitations in timestamp.proto, +// additionally call the CheckValid method: +// +// if err := ts.CheckValid(); err != nil { +// ... // handle error +// } +// +// +// Conversion from a Go Time +// +// The timestamppb.New function can be used to construct a Timestamp message +// from a standard Go time.Time value: +// +// ts := timestamppb.New(t) +// ... // make use of ts as a *timestamppb.Timestamp +// +// In order to construct a Timestamp representing the current time, use Now: +// +// ts := timestamppb.Now() +// ... // make use of ts as a *timestamppb.Timestamp +// package timestamppb import ( @@ -38,6 +80,7 @@ import ( protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" + time "time" ) // A Timestamp represents a point in time independent of any time zone or local @@ -140,6 +183,73 @@ type Timestamp struct { Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` } +// Now constructs a new Timestamp from the current time. +func Now() *Timestamp { + return New(time.Now()) +} + +// New constructs a new Timestamp from the provided time.Time. +func New(t time.Time) *Timestamp { + return &Timestamp{Seconds: int64(t.Unix()), Nanos: int32(t.Nanosecond())} +} + +// AsTime converts x to a time.Time. +func (x *Timestamp) AsTime() time.Time { + return time.Unix(int64(x.GetSeconds()), int64(x.GetNanos())).UTC() +} + +// IsValid reports whether the timestamp is valid. +// It is equivalent to CheckValid == nil. +func (x *Timestamp) IsValid() bool { + return x.check() == 0 +} + +// CheckValid returns an error if the timestamp is invalid. +// In particular, it checks whether the value represents a date that is +// in the range of 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. +// An error is reported for a nil Timestamp. 
+func (x *Timestamp) CheckValid() error { + switch x.check() { + case invalidNil: + return protoimpl.X.NewError("invalid nil Timestamp") + case invalidUnderflow: + return protoimpl.X.NewError("timestamp (%v) before 0001-01-01", x) + case invalidOverflow: + return protoimpl.X.NewError("timestamp (%v) after 9999-12-31", x) + case invalidNanos: + return protoimpl.X.NewError("timestamp (%v) has out-of-range nanos", x) + default: + return nil + } +} + +const ( + _ = iota + invalidNil + invalidUnderflow + invalidOverflow + invalidNanos +) + +func (x *Timestamp) check() uint { + const minTimestamp = -62135596800 // Seconds between 1970-01-01T00:00:00Z and 0001-01-01T00:00:00Z, inclusive + const maxTimestamp = +253402300799 // Seconds between 1970-01-01T00:00:00Z and 9999-12-31T23:59:59Z, inclusive + secs := x.GetSeconds() + nanos := x.GetNanos() + switch { + case x == nil: + return invalidNil + case secs < minTimestamp: + return invalidUnderflow + case secs > maxTimestamp: + return invalidOverflow + case nanos < 0 || nanos >= 1e9: + return invalidNanos + default: + return 0 + } +} + func (x *Timestamp) Reset() { *x = Timestamp{} if protoimpl.UnsafeEnabled { diff --git a/vendor/google.golang.org/protobuf/types/known/wrapperspb/wrappers.pb.go b/vendor/google.golang.org/protobuf/types/known/wrapperspb/wrappers.pb.go index 5c5ec2f1021..2355adf428e 100644 --- a/vendor/google.golang.org/protobuf/types/known/wrapperspb/wrappers.pb.go +++ b/vendor/google.golang.org/protobuf/types/known/wrapperspb/wrappers.pb.go @@ -62,6 +62,11 @@ type DoubleValue struct { Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` } +// Double stores v in a new DoubleValue and returns a pointer to it. +func Double(v float64) *DoubleValue { + return &DoubleValue{Value: v} +} + func (x *DoubleValue) Reset() { *x = DoubleValue{} if protoimpl.UnsafeEnabled { @@ -113,6 +118,11 @@ type FloatValue struct { Value float32 `protobuf:"fixed32,1,opt,name=value,proto3" json:"value,omitempty"` } +// Float stores v in a new FloatValue and returns a pointer to it. +func Float(v float32) *FloatValue { + return &FloatValue{Value: v} +} + func (x *FloatValue) Reset() { *x = FloatValue{} if protoimpl.UnsafeEnabled { @@ -164,6 +174,11 @@ type Int64Value struct { Value int64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` } +// Int64 stores v in a new Int64Value and returns a pointer to it. +func Int64(v int64) *Int64Value { + return &Int64Value{Value: v} +} + func (x *Int64Value) Reset() { *x = Int64Value{} if protoimpl.UnsafeEnabled { @@ -215,6 +230,11 @@ type UInt64Value struct { Value uint64 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` } +// UInt64 stores v in a new UInt64Value and returns a pointer to it. +func UInt64(v uint64) *UInt64Value { + return &UInt64Value{Value: v} +} + func (x *UInt64Value) Reset() { *x = UInt64Value{} if protoimpl.UnsafeEnabled { @@ -266,6 +286,11 @@ type Int32Value struct { Value int32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` } +// Int32 stores v in a new Int32Value and returns a pointer to it. +func Int32(v int32) *Int32Value { + return &Int32Value{Value: v} +} + func (x *Int32Value) Reset() { *x = Int32Value{} if protoimpl.UnsafeEnabled { @@ -317,6 +342,11 @@ type UInt32Value struct { Value uint32 `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` } +// UInt32 stores v in a new UInt32Value and returns a pointer to it. 
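And similarly for the Timestamp helpers above; a minimal sketch with an arbitrary instant:

    package main

    import (
    	"fmt"
    	"time"

    	"google.golang.org/protobuf/types/known/timestamppb"
    )

    func main() {
    	// Convert between time.Time and the Timestamp message.
    	t := time.Date(2021, time.March, 29, 18, 48, 29, 0, time.UTC)
    	ts := timestamppb.New(t)
    	fmt.Println(ts.AsTime().Equal(t)) // true
    	fmt.Println(ts.IsValid())         // true

    	// Out-of-range values are caught by CheckValid.
    	bad := &timestamppb.Timestamp{Seconds: 253402300800} // one second past 9999-12-31T23:59:59Z
    	fmt.Println(bad.CheckValid() != nil) // true

    	// Now is shorthand for New(time.Now()).
    	fmt.Println(timestamppb.Now().IsValid()) // true
    }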
+func UInt32(v uint32) *UInt32Value { + return &UInt32Value{Value: v} +} + func (x *UInt32Value) Reset() { *x = UInt32Value{} if protoimpl.UnsafeEnabled { @@ -368,6 +398,11 @@ type BoolValue struct { Value bool `protobuf:"varint,1,opt,name=value,proto3" json:"value,omitempty"` } +// Bool stores v in a new BoolValue and returns a pointer to it. +func Bool(v bool) *BoolValue { + return &BoolValue{Value: v} +} + func (x *BoolValue) Reset() { *x = BoolValue{} if protoimpl.UnsafeEnabled { @@ -419,6 +454,11 @@ type StringValue struct { Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` } +// String stores v in a new StringValue and returns a pointer to it. +func String(v string) *StringValue { + return &StringValue{Value: v} +} + func (x *StringValue) Reset() { *x = StringValue{} if protoimpl.UnsafeEnabled { @@ -470,6 +510,11 @@ type BytesValue struct { Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` } +// Bytes stores v in a new BytesValue and returns a pointer to it. +func Bytes(v []byte) *BytesValue { + return &BytesValue{Value: v} +} + func (x *BytesValue) Reset() { *x = BytesValue{} if protoimpl.UnsafeEnabled { diff --git a/vendor/modules.txt b/vendor/modules.txt index dfd9e77beb0..38c09743b9c 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -221,7 +221,7 @@ github.com/spf13/cobra # github.com/spf13/pflag v1.0.1 ## explicit github.com/spf13/pflag -# github.com/stretchr/testify v1.2.2 +# github.com/stretchr/testify v1.5.1 ## explicit github.com/stretchr/testify/assert github.com/stretchr/testify/require @@ -298,7 +298,7 @@ google.golang.org/genproto/protobuf/api google.golang.org/genproto/protobuf/field_mask google.golang.org/genproto/protobuf/ptype google.golang.org/genproto/protobuf/source_context -# google.golang.org/grpc v1.31.1 +# google.golang.org/grpc v1.36.1 ## explicit google.golang.org/grpc google.golang.org/grpc/attributes @@ -311,7 +311,6 @@ google.golang.org/grpc/binarylog/grpc_binarylog_v1 google.golang.org/grpc/codes google.golang.org/grpc/connectivity google.golang.org/grpc/credentials -google.golang.org/grpc/credentials/internal google.golang.org/grpc/encoding google.golang.org/grpc/encoding/proto google.golang.org/grpc/grpclog @@ -327,12 +326,16 @@ google.golang.org/grpc/internal/grpclog google.golang.org/grpc/internal/grpcrand google.golang.org/grpc/internal/grpcsync google.golang.org/grpc/internal/grpcutil +google.golang.org/grpc/internal/metadata +google.golang.org/grpc/internal/resolver google.golang.org/grpc/internal/resolver/dns google.golang.org/grpc/internal/resolver/passthrough +google.golang.org/grpc/internal/resolver/unix google.golang.org/grpc/internal/serviceconfig google.golang.org/grpc/internal/status google.golang.org/grpc/internal/syscall google.golang.org/grpc/internal/transport +google.golang.org/grpc/internal/transport/networktype google.golang.org/grpc/keepalive google.golang.org/grpc/metadata google.golang.org/grpc/peer @@ -342,14 +345,13 @@ google.golang.org/grpc/stats google.golang.org/grpc/status google.golang.org/grpc/tap google.golang.org/grpc/test/grpc_testing -# google.golang.org/protobuf v1.24.0 +# google.golang.org/protobuf v1.25.0 ## explicit google.golang.org/protobuf/encoding/protojson google.golang.org/protobuf/encoding/prototext google.golang.org/protobuf/encoding/protowire google.golang.org/protobuf/internal/descfmt google.golang.org/protobuf/internal/descopts -google.golang.org/protobuf/internal/detectknown google.golang.org/protobuf/internal/detrand 
google.golang.org/protobuf/internal/encoding/defval google.golang.org/protobuf/internal/encoding/json @@ -357,12 +359,11 @@ google.golang.org/protobuf/internal/encoding/messageset google.golang.org/protobuf/internal/encoding/tag google.golang.org/protobuf/internal/encoding/text google.golang.org/protobuf/internal/errors -google.golang.org/protobuf/internal/fieldnum google.golang.org/protobuf/internal/fieldsort google.golang.org/protobuf/internal/filedesc google.golang.org/protobuf/internal/filetype google.golang.org/protobuf/internal/flags -google.golang.org/protobuf/internal/genname +google.golang.org/protobuf/internal/genid google.golang.org/protobuf/internal/impl google.golang.org/protobuf/internal/mapsort google.golang.org/protobuf/internal/pragma From 659551da1be64f287845b6731f5363472d87d983 Mon Sep 17 00:00:00 2001 From: Mihail Stoykov Date: Tue, 30 Mar 2021 17:20:25 +0300 Subject: [PATCH 2/2] Maintenance update of github.com/jhump/protoreflect to v1.8.1 from v1.7.0 This also fixes #1928 among a lot of other bugfixes including more panics --- go.mod | 4 +- go.sum | 16 +- .../jhump/protoreflect/codec/encode_fields.go | 1 + .../jhump/protoreflect/desc/descriptor.go | 5 +- .../jhump/protoreflect/desc/protoparse/ast.go | 1206 +------- .../protoreflect/desc/protoparse/ast/doc.go | 27 + .../protoreflect/desc/protoparse/ast/enum.go | 154 ++ .../protoreflect/desc/protoparse/ast/field.go | 608 ++++ .../protoreflect/desc/protoparse/ast/file.go | 234 ++ .../desc/protoparse/ast/identifiers.go | 134 + .../desc/protoparse/ast/message.go | 198 ++ .../desc/protoparse/ast/no_source.go | 103 + .../protoreflect/desc/protoparse/ast/node.go | 200 ++ .../desc/protoparse/ast/options.go | 300 ++ .../protoreflect/desc/protoparse/ast/print.go | 86 + .../desc/protoparse/ast/ranges.go | 305 ++ .../desc/protoparse/ast/service.go | 273 ++ .../desc/protoparse/ast/source_pos.go | 38 + .../desc/protoparse/ast/values.go | 563 ++++ .../protoreflect/desc/protoparse/ast/walk.go | 492 ++++ .../desc/protoparse/descriptor_protos.go | 366 +-- .../protoreflect/desc/protoparse/errors.go | 3 + .../protoreflect/desc/protoparse/lexer.go | 175 +- .../protoreflect/desc/protoparse/linker.go | 51 +- .../protoreflect/desc/protoparse/options.go | 125 +- .../protoreflect/desc/protoparse/parser.go | 331 ++- .../protoreflect/desc/protoparse/proto.y | 1167 +++++--- .../protoreflect/desc/protoparse/proto.y.go | 2459 +++++++++-------- .../desc/protoparse/source_code_info.go | 378 ++- .../protoreflect/desc/protoparse/validate.go | 89 +- .../protoreflect/dynamic/dynamic_message.go | 19 +- .../protobuf/encoding/protojson/decode.go | 32 +- .../protobuf/encoding/protojson/encode.go | 228 +- .../encoding/protojson/well_known_types.go | 25 +- .../protobuf/encoding/prototext/decode.go | 24 +- .../protobuf/encoding/prototext/encode.go | 84 +- .../protobuf/internal/detrand/rand.go | 8 + .../encoding/messageset/messageset.go | 31 +- .../protobuf/internal/encoding/tag/tag.go | 2 +- .../protobuf/internal/fieldsort/fieldsort.go | 40 - .../protobuf/internal/filedesc/build.go | 3 + .../protobuf/internal/filedesc/desc.go | 68 +- .../protobuf/internal/filedesc/desc_lazy.go | 4 +- .../protobuf/internal/filedesc/desc_list.go | 172 +- .../internal/filedesc/desc_list_gen.go | 11 + .../protobuf/internal/impl/api_export.go | 2 +- .../protobuf/internal/impl/codec_field.go | 18 +- .../protobuf/internal/impl/codec_gen.go | 276 +- .../protobuf/internal/impl/codec_map.go | 19 +- .../protobuf/internal/impl/codec_message.go | 47 +- .../internal/impl/codec_messageset.go | 
21 +- .../protobuf/internal/impl/codec_reflect.go | 8 +- .../protobuf/internal/impl/decode.go | 16 +- .../protobuf/internal/impl/encode.go | 10 +- .../protobuf/internal/impl/legacy_export.go | 2 +- .../internal/impl/legacy_extension.go | 3 +- .../protobuf/internal/impl/legacy_message.go | 14 +- .../protobuf/internal/impl/merge.go | 6 +- .../protobuf/internal/impl/message.go | 69 +- .../protobuf/internal/impl/message_reflect.go | 91 +- .../protobuf/internal/impl/pointer_reflect.go | 1 + .../protobuf/internal/impl/pointer_unsafe.go | 1 + .../protobuf/internal/mapsort/mapsort.go | 43 - .../protobuf/internal/order/order.go | 89 + .../protobuf/internal/order/range.go | 115 + .../protobuf/internal/version/version.go | 2 +- .../protobuf/proto/decode.go | 16 +- .../protobuf/proto/decode_gen.go | 128 +- .../protobuf/proto/encode.go | 55 +- .../protobuf/proto/messageset.go | 7 +- .../google.golang.org/protobuf/proto/proto.go | 9 + .../protobuf/reflect/protodesc/desc.go | 1 + .../protobuf/reflect/protodesc/desc_init.go | 4 +- .../reflect/protodesc/desc_validate.go | 3 + .../protobuf/reflect/protodesc/proto.go | 10 +- .../protobuf/reflect/protoreflect/source.go | 84 +- .../reflect/protoreflect/source_gen.go | 461 +++ .../protobuf/reflect/protoreflect/type.go | 34 + .../reflect/protoregistry/registry.go | 46 +- .../protobuf/types/dynamicpb/dynamic.go | 12 + .../types/known/fieldmaskpb/field_mask.pb.go | 7 +- vendor/modules.txt | 8 +- 82 files changed, 8506 insertions(+), 4074 deletions(-) create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/doc.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/enum.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/field.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/file.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/identifiers.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/message.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/no_source.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/node.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/options.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/print.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/ranges.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/service.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/source_pos.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/values.go create mode 100644 vendor/github.com/jhump/protoreflect/desc/protoparse/ast/walk.go delete mode 100644 vendor/google.golang.org/protobuf/internal/fieldsort/fieldsort.go delete mode 100644 vendor/google.golang.org/protobuf/internal/mapsort/mapsort.go create mode 100644 vendor/google.golang.org/protobuf/internal/order/order.go create mode 100644 vendor/google.golang.org/protobuf/internal/order/range.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go diff --git a/go.mod b/go.mod index eed170fd78f..b301a878294 100644 --- a/go.mod +++ b/go.mod @@ -33,7 +33,7 @@ require ( github.com/gorilla/websocket v1.4.2 github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/influxdata/influxdb1-client v0.0.0-20190402204710-8ff2fc3824fc - github.com/jhump/protoreflect v1.7.0 + 
github.com/jhump/protoreflect v1.8.2 github.com/julienschmidt/httprouter v1.1.1-0.20180222160526-d18983907793 github.com/kardianos/osext v0.0.0-20170510131534-ae77be60afb1 // indirect github.com/kelseyhightower/envconfig v1.4.0 @@ -77,7 +77,7 @@ require ( golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/genproto v0.0.0-20200903010400-9bfcb5116336 // indirect google.golang.org/grpc v1.36.1 - google.golang.org/protobuf v1.25.0 + google.golang.org/protobuf v1.25.1-0.20200805231151-a709e31e5d12 gopkg.in/go-playground/assert.v1 v1.2.1 // indirect gopkg.in/go-playground/validator.v8 v8.18.2 // indirect gopkg.in/guregu/null.v2 v2.1.2 // indirect diff --git a/go.sum b/go.sum index c56606b59b1..b1ba2faa144 100644 --- a/go.sum +++ b/go.sum @@ -62,7 +62,6 @@ github.com/golang/gddo v0.0.0-20190419222130-af0f2af80721/go.mod h1:xEhNfoBDX1hz github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= @@ -95,8 +94,8 @@ github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NH github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/influxdata/influxdb1-client v0.0.0-20190402204710-8ff2fc3824fc h1:KpMgaYJRieDkHZJWY3LMafvtqS/U8xX6+lUN+OKpl/Y= github.com/influxdata/influxdb1-client v0.0.0-20190402204710-8ff2fc3824fc/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= -github.com/jhump/protoreflect v1.7.0 h1:qJ7piXPrjP3mDrfHf5ATkxfLix8ANs226vpo0aACOn0= -github.com/jhump/protoreflect v1.7.0/go.mod h1:RZkzh7Hi9J7qT/sPlWnJ/UwZqCJvciFxKDA0UCeltSM= +github.com/jhump/protoreflect v1.8.2 h1:k2xE7wcUomeqwY0LDCYA16y4WWfyTcMx5mKhk0d4ua0= +github.com/jhump/protoreflect v1.8.2/go.mod h1:7GcYQDdMU/O/BBrl/cX6PNHpXh6cenjd8pneu5yW7Tg= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/julienschmidt/httprouter v1.1.1-0.20180222160526-d18983907793 h1:C70cJzQWvJh+6Rw8q2Qngi8J8uc1BC6FqJneu0f2pr8= @@ -192,11 +191,13 @@ github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyC github.com/valyala/fasttemplate v0.0.0-20170224212429-dcecefd839c4 h1:gKMu1Bf6QINDnvyZuTaACm9ofY+PRh+5vFz4oxBZeF8= github.com/valyala/fasttemplate v0.0.0-20170224212429-dcecefd839c4/go.mod h1:50wTf68f99/Zt14pr046Tgt3Lp2vLyFZKzbFXTOabXw= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/zyedidia/highlight v0.0.0-20170330143449-201131ce5cf5 h1:Zs6mpwXvlqpF9zHl5XaN0p5V4J9XvP+WBuiuXyIgqvc= github.com/zyedidia/highlight v0.0.0-20170330143449-201131ce5cf5/go.mod h1:c1r+Ob9tUTPB0FKWO1+x+Hsc/zNa45WdGq7Y38Ybip0= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto 
v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897 h1:pLI5jrR7OSLijeIDcmRxNmw2api+jEfxLoykJVice/E= golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -205,6 +206,7 @@ golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvx golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -215,6 +217,7 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11 h1:lwlPPsmjDKK0J6eG6xDWd5XPehI0R024zxjDnw3esPA= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -222,6 +225,7 @@ golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -251,6 +255,7 @@ golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200522201501-cb1345f3a375/go.mod 
h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200717024301-6ddee64345a6/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -258,13 +263,11 @@ golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1N golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200903010400-9bfcb5116336 h1:ZcAny/XH59BbzUOKydQpvIlklwibW3T9SvDE5cGhdzc= google.golang.org/genproto v0.0.0-20200903010400-9bfcb5116336/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= @@ -280,8 +283,9 @@ google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2 google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.25.1-0.20200805231151-a709e31e5d12 h1:OwhZOOMuf7leLaSCuxtQ9FW7ui2L2L6UKOtKAUqovUQ= +google.golang.org/protobuf v1.25.1-0.20200805231151-a709e31e5d12/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/vendor/github.com/jhump/protoreflect/codec/encode_fields.go b/vendor/github.com/jhump/protoreflect/codec/encode_fields.go index 539c3fc9529..499aa9564a1 100644 --- a/vendor/github.com/jhump/protoreflect/codec/encode_fields.go +++ b/vendor/github.com/jhump/protoreflect/codec/encode_fields.go @@ -34,6 +34,7 @@ func (cb *Buffer) EncodeFieldValue(fd *desc.FieldDescriptor, val interface{}) er valType := entryType.FindFieldByNumber(2) var entryBuffer Buffer if cb.IsDeterministic() { + 
entryBuffer.SetDeterministic(true) keys := make([]interface{}, 0, len(mp)) for k := range mp { keys = append(keys, k) diff --git a/vendor/github.com/jhump/protoreflect/desc/descriptor.go b/vendor/github.com/jhump/protoreflect/desc/descriptor.go index a6a09142493..42f0f8eb126 100644 --- a/vendor/github.com/jhump/protoreflect/desc/descriptor.go +++ b/vendor/github.com/jhump/protoreflect/desc/descriptor.go @@ -995,8 +995,9 @@ func (fd *FieldDescriptor) IsRepeated() bool { } // IsProto3Optional returns true if this field has an explicit "optional" label -// and is in a "proto3" syntax file. Such fields will be nested in synthetic -// oneofs that contain only the single field. +// and is in a "proto3" syntax file. Such fields, if they are normal fields (not +// extensions), will be nested in synthetic oneofs that contain only the single +// field. func (fd *FieldDescriptor) IsProto3Optional() bool { return internal.GetProto3Optional(fd.proto) } diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go index 6980e93d1a1..6a39e754710 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast.go @@ -1,1127 +1,205 @@ package protoparse -import "fmt" +import "github.com/jhump/protoreflect/desc/protoparse/ast" -// This file defines all of the nodes in the proto AST. +// SourcePos is the same as ast.SourcePos. This alias exists for +// backwards compatibility (SourcePos used to be defined in this package.) +type SourcePos = ast.SourcePos -// SourcePos identifies a location in a proto source file. -type SourcePos struct { - Filename string - Line, Col int - Offset int -} - -func (pos SourcePos) String() string { - if pos.Line <= 0 || pos.Col <= 0 { - return pos.Filename - } - return fmt.Sprintf("%s:%d:%d", pos.Filename, pos.Line, pos.Col) -} - -func unknownPos(filename string) *SourcePos { - return &SourcePos{Filename: filename} -} - -// node is the interface implemented by all nodes in the AST -type node interface { - start() *SourcePos - end() *SourcePos - leadingComments() []comment - trailingComments() []comment -} - -type terminalNode interface { - node - popLeadingComment() comment - pushTrailingComment(comment) -} - -var _ terminalNode = (*basicNode)(nil) -var _ terminalNode = (*stringLiteralNode)(nil) -var _ terminalNode = (*intLiteralNode)(nil) -var _ terminalNode = (*floatLiteralNode)(nil) -var _ terminalNode = (*identNode)(nil) - -type fileDecl interface { - node - getSyntax() node -} - -var _ fileDecl = (*fileNode)(nil) -var _ fileDecl = (*noSourceNode)(nil) - -type optionDecl interface { - node - getName() node - getValue() valueNode -} - -var _ optionDecl = (*optionNode)(nil) -var _ optionDecl = (*noSourceNode)(nil) - -type fieldDecl interface { - node - fieldLabel() node - fieldName() node - fieldType() node - fieldTag() node - fieldExtendee() node - getGroupKeyword() node -} - -var _ fieldDecl = (*fieldNode)(nil) -var _ fieldDecl = (*groupNode)(nil) -var _ fieldDecl = (*mapFieldNode)(nil) -var _ fieldDecl = (*syntheticMapField)(nil) -var _ fieldDecl = (*noSourceNode)(nil) - -type rangeDecl interface { - node - rangeStart() node - rangeEnd() node -} - -var _ rangeDecl = (*rangeNode)(nil) -var _ rangeDecl = (*noSourceNode)(nil) - -type enumValueDecl interface { - node - getName() node - getNumber() node -} - -var _ enumValueDecl = (*enumValueNode)(nil) -var _ enumValueDecl = (*noSourceNode)(nil) - -type msgDecl interface { - node - 
messageName() node -} - -var _ msgDecl = (*messageNode)(nil) -var _ msgDecl = (*groupNode)(nil) -var _ msgDecl = (*mapFieldNode)(nil) -var _ msgDecl = (*noSourceNode)(nil) - -type methodDecl interface { - node - getInputType() node - getOutputType() node -} - -var _ methodDecl = (*methodNode)(nil) -var _ methodDecl = (*noSourceNode)(nil) - -type posRange struct { - start, end SourcePos -} - -type basicNode struct { - posRange - leading []comment - trailing []comment -} - -func (n *basicNode) start() *SourcePos { - return &n.posRange.start -} - -func (n *basicNode) end() *SourcePos { - return &n.posRange.end -} - -func (n *basicNode) leadingComments() []comment { - return n.leading -} - -func (n *basicNode) trailingComments() []comment { - return n.trailing -} - -func (n *basicNode) popLeadingComment() comment { - c := n.leading[0] - n.leading = n.leading[1:] - return c -} - -func (n *basicNode) pushTrailingComment(c comment) { - n.trailing = append(n.trailing, c) -} - -type comment struct { - posRange - text string -} - -type basicCompositeNode struct { - first node - last node -} - -func (n *basicCompositeNode) start() *SourcePos { - return n.first.start() -} - -func (n *basicCompositeNode) end() *SourcePos { - return n.last.end() -} - -func (n *basicCompositeNode) leadingComments() []comment { - return n.first.leadingComments() -} - -func (n *basicCompositeNode) trailingComments() []comment { - return n.last.trailingComments() -} - -func (n *basicCompositeNode) setRange(first, last node) { - n.first = first - n.last = last -} - -type fileNode struct { - basicCompositeNode - syntax *syntaxNode - decls []*fileElement - - // This field is populated after parsing, to make it easier to find - // source locations by import name for constructing link errors. 
- imports []*importNode -} - -func (n *fileNode) getSyntax() node { - return n.syntax -} - -type fileElement struct { - // a discriminated union: only one field will be set - imp *importNode - pkg *packageNode - option *optionNode - message *messageNode - enum *enumNode - extend *extendNode - service *serviceNode - empty *basicNode -} - -func (n *fileElement) start() *SourcePos { - return n.get().start() -} - -func (n *fileElement) end() *SourcePos { - return n.get().end() -} - -func (n *fileElement) leadingComments() []comment { - return n.get().leadingComments() -} - -func (n *fileElement) trailingComments() []comment { - return n.get().trailingComments() -} - -func (n *fileElement) get() node { - switch { - case n.imp != nil: - return n.imp - case n.pkg != nil: - return n.pkg - case n.option != nil: - return n.option - case n.message != nil: - return n.message - case n.enum != nil: - return n.enum - case n.extend != nil: - return n.extend - case n.service != nil: - return n.service - default: - return n.empty - } -} - -type syntaxNode struct { - basicCompositeNode - syntax *compoundStringNode -} - -type importNode struct { - basicCompositeNode - name *compoundStringNode - public bool - weak bool -} - -type packageNode struct { - basicCompositeNode - name *compoundIdentNode -} - -type identifier string - -type identNode struct { - basicNode - val string -} - -func (n *identNode) value() interface{} { - return identifier(n.val) -} +// the types below are accumulator types: linked lists that are +// constructed during parsing and then converted to slices of AST nodes +// once the whole list has been parsed -type compoundIdentNode struct { - basicCompositeNode - val string +type compactOptionList struct { + option *ast.OptionNode + comma *ast.RuneNode + next *compactOptionList } -func (n *compoundIdentNode) value() interface{} { - return identifier(n.val) -} - -type compactOptionsNode struct { - basicCompositeNode - decls []*optionNode -} - -func (n *compactOptionsNode) Elements() []*optionNode { - if n == nil { - return nil +func (list *compactOptionList) toNodes() ([]*ast.OptionNode, []*ast.RuneNode) { + l := 0 + for cur := list; cur != nil; cur = cur.next { + l++ } - return n.decls -} - -type optionNode struct { - basicCompositeNode - name *optionNameNode - val valueNode -} - -func (n *optionNode) getName() node { - return n.name -} - -func (n *optionNode) getValue() valueNode { - return n.val -} - -type optionNameNode struct { - basicCompositeNode - parts []*optionNamePartNode -} - -type optionNamePartNode struct { - basicCompositeNode - text *compoundIdentNode - offset int - length int - isExtension bool - st, en *SourcePos -} - -func (n *optionNamePartNode) start() *SourcePos { - if n.isExtension { - return n.basicCompositeNode.start() + opts := make([]*ast.OptionNode, l) + commas := make([]*ast.RuneNode, l-1) + for cur, i := list, 0; cur != nil; cur, i = cur.next, i+1 { + opts[i] = cur.option + if cur.comma != nil { + commas[i] = cur.comma + } } - return n.st + return opts, commas } -func (n *optionNamePartNode) end() *SourcePos { - if n.isExtension { - return n.basicCompositeNode.end() - } - return n.en +type stringList struct { + str *ast.StringLiteralNode + next *stringList } -func (n *optionNamePartNode) setRange(first, last node) { - n.basicCompositeNode.setRange(first, last) - if !n.isExtension { - st := *first.start() - st.Col += n.offset - n.st = &st - en := st - en.Col += n.length - n.en = &en +func (list *stringList) toStringValueNode() ast.StringValueNode { + if list.next 
== nil { + // single name + return list.str } -} - -type valueNode interface { - node - value() interface{} -} - -var _ valueNode = (*identNode)(nil) -var _ valueNode = (*compoundIdentNode)(nil) -var _ valueNode = (*stringLiteralNode)(nil) -var _ valueNode = (*compoundStringNode)(nil) -var _ valueNode = (*intLiteralNode)(nil) -var _ valueNode = (*compoundIntNode)(nil) -var _ valueNode = (*compoundUintNode)(nil) -var _ valueNode = (*floatLiteralNode)(nil) -var _ valueNode = (*compoundFloatNode)(nil) -var _ valueNode = (*boolLiteralNode)(nil) -var _ valueNode = (*sliceLiteralNode)(nil) -var _ valueNode = (*aggregateLiteralNode)(nil) -var _ valueNode = (*noSourceNode)(nil) - -type stringLiteralNode struct { - basicNode - val string -} -func (n *stringLiteralNode) value() interface{} { - return n.val -} - -type compoundStringNode struct { - basicCompositeNode - val string -} - -func (n *compoundStringNode) value() interface{} { - return n.val -} - -type intLiteral interface { - asInt32(min, max int32) (int32, bool) - value() interface{} -} - -type intLiteralNode struct { - basicNode - val uint64 -} - -var _ intLiteral = (*intLiteralNode)(nil) - -func (n *intLiteralNode) value() interface{} { - return n.val -} - -func (n *intLiteralNode) asInt32(min, max int32) (int32, bool) { - if (min >= 0 && n.val < uint64(min)) || n.val > uint64(max) { - return 0, false + l := 0 + for cur := list; cur != nil; cur = cur.next { + l++ } - return int32(n.val), true -} - -type compoundUintNode struct { - basicCompositeNode - val uint64 -} - -var _ intLiteral = (*compoundUintNode)(nil) - -func (n *compoundUintNode) value() interface{} { - return n.val -} - -func (n *compoundUintNode) asInt32(min, max int32) (int32, bool) { - if (min >= 0 && n.val < uint64(min)) || n.val > uint64(max) { - return 0, false + strs := make([]*ast.StringLiteralNode, l) + for cur, i := list, 0; cur != nil; cur, i = cur.next, i+1 { + strs[i] = cur.str } - return int32(n.val), true + return ast.NewCompoundLiteralStringNode(strs...) 
} -type compoundIntNode struct { - basicCompositeNode - val int64 -} - -var _ intLiteral = (*compoundIntNode)(nil) - -func (n *compoundIntNode) value() interface{} { - return n.val +type nameList struct { + name ast.StringValueNode + comma *ast.RuneNode + next *nameList } -func (n *compoundIntNode) asInt32(min, max int32) (int32, bool) { - if n.val < int64(min) || n.val > int64(max) { - return 0, false +func (list *nameList) toNodes() ([]ast.StringValueNode, []*ast.RuneNode) { + l := 0 + for cur := list; cur != nil; cur = cur.next { + l++ } - return int32(n.val), true -} - -type floatLiteralNode struct { - basicNode - val float64 -} - -func (n *floatLiteralNode) value() interface{} { - return n.val -} - -type compoundFloatNode struct { - basicCompositeNode - val float64 -} - -func (n *compoundFloatNode) value() interface{} { - return n.val -} - -type boolLiteralNode struct { - *identNode - val bool -} - -func (n *boolLiteralNode) value() interface{} { - return n.val -} - -type sliceLiteralNode struct { - basicCompositeNode - elements []valueNode -} - -func (n *sliceLiteralNode) value() interface{} { - return n.elements -} - -type aggregateLiteralNode struct { - basicCompositeNode - elements []*aggregateEntryNode -} - -func (n *aggregateLiteralNode) value() interface{} { - return n.elements -} - -type aggregateEntryNode struct { - basicCompositeNode - name *aggregateNameNode - val valueNode -} - -type aggregateNameNode struct { - basicCompositeNode - name *compoundIdentNode - isExtension bool -} - -func (a *aggregateNameNode) value() string { - if a.isExtension { - return "[" + a.name.val + "]" - } else { - return a.name.val + names := make([]ast.StringValueNode, l) + commas := make([]*ast.RuneNode, l-1) + for cur, i := list, 0; cur != nil; cur, i = cur.next, i+1 { + names[i] = cur.name + if cur.comma != nil { + commas[i] = cur.comma + } } + return names, commas } -type fieldNode struct { - basicCompositeNode - label fieldLabel - fldType *compoundIdentNode - name *identNode - tag *intLiteralNode - options *compactOptionsNode - - // This field is populated after parsing, to allow lookup of extendee source - // locations when field extendees cannot be linked. (Otherwise, this is just - // stored as a string in the field descriptors defined inside the extend - // block). 
- extendee *extendNode +type rangeList struct { + rng *ast.RangeNode + comma *ast.RuneNode + next *rangeList } -func (n *fieldNode) fieldLabel() node { - // proto3 fields and fields inside one-ofs will not have a label and we need - // this check in order to return a nil node -- otherwise we'd return a - // non-nil node that has a nil pointer value in it :/ - if n.label.identNode == nil { - return nil +func (list *rangeList) toNodes() ([]*ast.RangeNode, []*ast.RuneNode) { + l := 0 + for cur := list; cur != nil; cur = cur.next { + l++ } - return n.label.identNode -} - -func (n *fieldNode) fieldName() node { - return n.name -} - -func (n *fieldNode) fieldType() node { - return n.fldType -} - -func (n *fieldNode) fieldTag() node { - return n.tag -} - -func (n *fieldNode) fieldExtendee() node { - if n.extendee != nil { - return n.extendee.extendee + ranges := make([]*ast.RangeNode, l) + commas := make([]*ast.RuneNode, l-1) + for cur, i := list, 0; cur != nil; cur, i = cur.next, i+1 { + ranges[i] = cur.rng + if cur.comma != nil { + commas[i] = cur.comma + } } - return nil -} - -func (n *fieldNode) getGroupKeyword() node { - return nil + return ranges, commas } -type fieldLabel struct { - *identNode - repeated bool - required bool -} - -type groupNode struct { - basicCompositeNode - groupKeyword *identNode - label fieldLabel - name *identNode - tag *intLiteralNode - decls []*messageElement - options *compactOptionsNode - - // This field is populated after parsing, to allow lookup of extendee source - // locations when field extendees cannot be linked. (Otherwise, this is just - // stored as a string in the field descriptors defined inside the extend - // block). - extendee *extendNode +type valueList struct { + val ast.ValueNode + comma *ast.RuneNode + next *valueList } -func (n *groupNode) fieldLabel() node { - if n.label.identNode == nil { - // return nil interface to indicate absence, not a typed nil - return nil +func (list *valueList) toNodes() ([]ast.ValueNode, []*ast.RuneNode) { + if list == nil { + return nil, nil } - return n.label.identNode -} - -func (n *groupNode) fieldName() node { - return n.name -} - -func (n *groupNode) fieldType() node { - return n.groupKeyword -} - -func (n *groupNode) fieldTag() node { - return n.tag -} - -func (n *groupNode) fieldExtendee() node { - if n.extendee != nil { - return n.extendee.extendee + l := 0 + for cur := list; cur != nil; cur = cur.next { + l++ } - return nil -} - -func (n *groupNode) getGroupKeyword() node { - return n.groupKeyword -} - -func (n *groupNode) messageName() node { - return n.name -} - -type oneOfNode struct { - basicCompositeNode - name *identNode - decls []*oneOfElement -} - -type oneOfElement struct { - // a discriminated union: only one field will be set - option *optionNode - field *fieldNode - group *groupNode - empty *basicNode -} - -func (n *oneOfElement) start() *SourcePos { - return n.get().start() -} - -func (n *oneOfElement) end() *SourcePos { - return n.get().end() -} - -func (n *oneOfElement) leadingComments() []comment { - return n.get().leadingComments() -} - -func (n *oneOfElement) trailingComments() []comment { - return n.get().trailingComments() -} - -func (n *oneOfElement) get() node { - switch { - case n.option != nil: - return n.option - case n.field != nil: - return n.field - default: - return n.empty + vals := make([]ast.ValueNode, l) + commas := make([]*ast.RuneNode, l-1) + for cur, i := list, 0; cur != nil; cur, i = cur.next, i+1 { + vals[i] = cur.val + if cur.comma != nil { + commas[i] = cur.comma + } 
} + return vals, commas } -type mapTypeNode struct { - basicCompositeNode - mapKeyword *identNode - keyType *identNode - valueType *compoundIdentNode -} - -type mapFieldNode struct { - basicCompositeNode - mapType *mapTypeNode - name *identNode - tag *intLiteralNode - options *compactOptionsNode +type fieldRefList struct { + ref *ast.FieldReferenceNode + dot *ast.RuneNode + next *fieldRefList } -func (n *mapFieldNode) fieldLabel() node { - return nil -} - -func (n *mapFieldNode) fieldName() node { - return n.name -} - -func (n *mapFieldNode) fieldType() node { - return n.mapType -} - -func (n *mapFieldNode) fieldTag() node { - return n.tag -} - -func (n *mapFieldNode) fieldExtendee() node { - return nil -} - -func (n *mapFieldNode) getGroupKeyword() node { - return nil -} - -func (n *mapFieldNode) messageName() node { - return n.name -} - -func (n *mapFieldNode) keyField() *syntheticMapField { - k := n.mapType.keyType - t := &compoundIdentNode{val: k.val} - t.setRange(k, k) - return newSyntheticMapField(t, 1) -} - -func (n *mapFieldNode) valueField() *syntheticMapField { - return newSyntheticMapField(n.mapType.valueType, 2) -} - -func newSyntheticMapField(ident *compoundIdentNode, tagNum uint64) *syntheticMapField { - tag := &intLiteralNode{ - basicNode: basicNode{ - posRange: posRange{start: *ident.start(), end: *ident.end()}, - }, - val: tagNum, +func (list *fieldRefList) toNodes() ([]*ast.FieldReferenceNode, []*ast.RuneNode) { + l := 0 + for cur := list; cur != nil; cur = cur.next { + l++ } - return &syntheticMapField{ident: ident, tag: tag} -} - -type syntheticMapField struct { - ident *compoundIdentNode - tag *intLiteralNode -} - -func (n *syntheticMapField) start() *SourcePos { - return n.ident.start() -} - -func (n *syntheticMapField) end() *SourcePos { - return n.ident.end() -} - -func (n *syntheticMapField) leadingComments() []comment { - return nil -} - -func (n *syntheticMapField) trailingComments() []comment { - return nil -} - -func (n *syntheticMapField) fieldLabel() node { - return n.ident -} - -func (n *syntheticMapField) fieldName() node { - return n.ident -} - -func (n *syntheticMapField) fieldType() node { - return n.ident -} - -func (n *syntheticMapField) fieldTag() node { - return n.tag -} - -func (n *syntheticMapField) fieldExtendee() node { - return nil -} - -func (n *syntheticMapField) getGroupKeyword() node { - return nil -} - -type extensionRangeNode struct { - basicCompositeNode - ranges []*rangeNode - options *compactOptionsNode -} - -type rangeNode struct { - basicCompositeNode - startNode, endNode node - endMax bool -} - -func (n *rangeNode) rangeStart() node { - return n.startNode -} - -func (n *rangeNode) rangeEnd() node { - if n.endNode == nil { - return n.startNode + refs := make([]*ast.FieldReferenceNode, l) + dots := make([]*ast.RuneNode, l-1) + for cur, i := list, 0; cur != nil; cur, i = cur.next, i+1 { + refs[i] = cur.ref + if cur.dot != nil { + dots[i] = cur.dot + } } - return n.endNode -} -func (n *rangeNode) startValue() interface{} { - return n.startNode.(intLiteral).value() + return refs, dots } -func (n *rangeNode) startValueAsInt32(min, max int32) (int32, bool) { - return n.startNode.(intLiteral).asInt32(min, max) +type identList struct { + ident *ast.IdentNode + dot *ast.RuneNode + next *identList } -func (n *rangeNode) endValue() interface{} { - l, ok := n.endNode.(intLiteral) - if !ok { - return nil +func (list *identList) toIdentValueNode(leadingDot *ast.RuneNode) ast.IdentValueNode { + if list.next == nil && leadingDot == nil { + // single 
name + return list.ident } - return l.value() -} -func (n *rangeNode) endValueAsInt32(min, max int32) (int32, bool) { - if n.endMax { - return max, true - } - if n.endNode == nil { - return n.startValueAsInt32(min, max) + l := 0 + for cur := list; cur != nil; cur = cur.next { + l++ } - return n.endNode.(intLiteral).asInt32(min, max) -} - -type reservedNode struct { - basicCompositeNode - ranges []*rangeNode - names []*compoundStringNode -} - -type enumNode struct { - basicCompositeNode - name *identNode - decls []*enumElement -} - -type enumElement struct { - // a discriminated union: only one field will be set - option *optionNode - value *enumValueNode - reserved *reservedNode - empty *basicNode -} - -func (n *enumElement) start() *SourcePos { - return n.get().start() -} - -func (n *enumElement) end() *SourcePos { - return n.get().end() -} - -func (n *enumElement) leadingComments() []comment { - return n.get().leadingComments() -} - -func (n *enumElement) trailingComments() []comment { - return n.get().trailingComments() -} - -func (n *enumElement) get() node { - switch { - case n.option != nil: - return n.option - case n.value != nil: - return n.value - default: - return n.empty + idents := make([]*ast.IdentNode, l) + dots := make([]*ast.RuneNode, l-1) + for cur, i := list, 0; cur != nil; cur, i = cur.next, i+1 { + idents[i] = cur.ident + if cur.dot != nil { + dots[i] = cur.dot + } } -} -type enumValueNode struct { - basicCompositeNode - name *identNode - options *compactOptionsNode - number *compoundIntNode + return ast.NewCompoundIdentNode(leadingDot, idents, dots) } -func (n *enumValueNode) getName() node { - return n.name +type messageFieldEntry struct { + field *ast.MessageFieldNode + delimiter *ast.RuneNode } -func (n *enumValueNode) getNumber() node { - return n.number +type messageFieldList struct { + field *messageFieldEntry + next *messageFieldList } -type messageNode struct { - basicCompositeNode - name *identNode - decls []*messageElement -} - -func (n *messageNode) messageName() node { - return n.name -} - -type messageElement struct { - // a discriminated union: only one field will be set - option *optionNode - field *fieldNode - mapField *mapFieldNode - oneOf *oneOfNode - group *groupNode - nested *messageNode - enum *enumNode - extend *extendNode - extensionRange *extensionRangeNode - reserved *reservedNode - empty *basicNode -} - -func (n *messageElement) start() *SourcePos { - return n.get().start() -} - -func (n *messageElement) end() *SourcePos { - return n.get().end() -} - -func (n *messageElement) leadingComments() []comment { - return n.get().leadingComments() -} - -func (n *messageElement) trailingComments() []comment { - return n.get().trailingComments() -} - -func (n *messageElement) get() node { - switch { - case n.option != nil: - return n.option - case n.field != nil: - return n.field - case n.mapField != nil: - return n.mapField - case n.oneOf != nil: - return n.oneOf - case n.group != nil: - return n.group - case n.nested != nil: - return n.nested - case n.enum != nil: - return n.enum - case n.extend != nil: - return n.extend - case n.extensionRange != nil: - return n.extensionRange - case n.reserved != nil: - return n.reserved - default: - return n.empty +func (list *messageFieldList) toNodes() ([]*ast.MessageFieldNode, []*ast.RuneNode) { + if list == nil { + return nil, nil } -} - -type extendNode struct { - basicCompositeNode - extendee *compoundIdentNode - decls []*extendElement -} - -type extendElement struct { - // a discriminated union: only one 
field will be set - field *fieldNode - group *groupNode - empty *basicNode -} - -func (n *extendElement) start() *SourcePos { - return n.get().start() -} - -func (n *extendElement) end() *SourcePos { - return n.get().end() -} - -func (n *extendElement) leadingComments() []comment { - return n.get().leadingComments() -} - -func (n *extendElement) trailingComments() []comment { - return n.get().trailingComments() -} - -func (n *extendElement) get() node { - switch { - case n.field != nil: - return n.field - case n.group != nil: - return n.group - default: - return n.empty + l := 0 + for cur := list; cur != nil; cur = cur.next { + l++ } -} - -type serviceNode struct { - basicCompositeNode - name *identNode - decls []*serviceElement -} - -type serviceElement struct { - // a discriminated union: only one field will be set - option *optionNode - rpc *methodNode - empty *basicNode -} - -func (n *serviceElement) start() *SourcePos { - return n.get().start() -} - -func (n *serviceElement) end() *SourcePos { - return n.get().end() -} - -func (n *serviceElement) leadingComments() []comment { - return n.get().leadingComments() -} - -func (n *serviceElement) trailingComments() []comment { - return n.get().trailingComments() -} - -func (n *serviceElement) get() node { - switch { - case n.option != nil: - return n.option - case n.rpc != nil: - return n.rpc - default: - return n.empty + fields := make([]*ast.MessageFieldNode, l) + delimiters := make([]*ast.RuneNode, l) + for cur, i := list, 0; cur != nil; cur, i = cur.next, i+1 { + fields[i] = cur.field.field + if cur.field.delimiter != nil { + delimiters[i] = cur.field.delimiter + } } -} - -type methodNode struct { - basicCompositeNode - name *identNode - input *rpcTypeNode - output *rpcTypeNode - options []*optionNode -} - -func (n *methodNode) getInputType() node { - return n.input.msgType -} - -func (n *methodNode) getOutputType() node { - return n.output.msgType -} - -type rpcTypeNode struct { - basicCompositeNode - msgType *compoundIdentNode - streamKeyword node -} - -type noSourceNode struct { - pos *SourcePos -} - -func (n noSourceNode) start() *SourcePos { - return n.pos -} - -func (n noSourceNode) end() *SourcePos { - return n.pos -} - -func (n noSourceNode) leadingComments() []comment { - return nil -} - -func (n noSourceNode) trailingComments() []comment { - return nil -} - -func (n noSourceNode) getSyntax() node { - return n -} - -func (n noSourceNode) getName() node { - return n -} - -func (n noSourceNode) getValue() valueNode { - return n -} - -func (n noSourceNode) fieldLabel() node { - return n -} - -func (n noSourceNode) fieldName() node { - return n -} - -func (n noSourceNode) fieldType() node { - return n -} - -func (n noSourceNode) fieldTag() node { - return n -} - -func (n noSourceNode) fieldExtendee() node { - return n -} - -func (n noSourceNode) getGroupKeyword() node { - return n -} - -func (n noSourceNode) rangeStart() node { - return n -} - -func (n noSourceNode) rangeEnd() node { - return n -} - -func (n noSourceNode) getNumber() node { - return n -} - -func (n noSourceNode) messageName() node { - return n -} - -func (n noSourceNode) getInputType() node { - return n -} - -func (n noSourceNode) getOutputType() node { - return n -} -func (n noSourceNode) value() interface{} { - return nil + return fields, delimiters } diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/doc.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/doc.go new file mode 100644 index 00000000000..e8902000a29 --- /dev/null +++ 
b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/doc.go @@ -0,0 +1,27 @@ +// Package ast defines types for modeling the AST (Abstract Syntax +// Tree) for the protocol buffers source language. +// +// All nodes of the tree implement the Node interface. Leaf nodes in the +// tree implement TerminalNode and all others implement CompositeNode. +// The root of the tree for a proto source file is a *FileNode. +// +// Comments are not represented as nodes in the tree. Instead, they are +// attached to all terminal nodes in the tree. So, when lexing, comments +// are accumulated until the next non-comment token is found. The AST +// model in this package thus provides access to all comments in the +// file, regardless of location (unlike the SourceCodeInfo present in +// descriptor protos, which are lossy). The comments associated with a +// non-leaf/non-token node (i.e. a CompositeNode) come from the first +// and last nodes in its sub-tree. +// +// Creation of AST nodes should use the factory functions in this +// package instead of struct literals. Some factory functions accept +// optional arguments, which means the arguments can be nil. If nil +// values are provided for other (non-optional) arguments, the resulting +// node may be invalid and cause panics later in the program. +// +// This package defines numerous interfaces. However, user code should +// not attempt to implement any of them. Most consumers of an AST will +// not work correctly if they encounter concrete implementations other +// than the ones defined in this package. +package ast diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/enum.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/enum.go new file mode 100644 index 00000000000..769e056e8aa --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/enum.go @@ -0,0 +1,154 @@ +package ast + +import "fmt" + +// EnumNode represents an enum declaration. Example: +// +// enum Foo { BAR = 0; BAZ = 1 } +type EnumNode struct { + compositeNode + Keyword *KeywordNode + Name *IdentNode + OpenBrace *RuneNode + Decls []EnumElement + CloseBrace *RuneNode +} + +func (*EnumNode) fileElement() {} +func (*EnumNode) msgElement() {} + +// NewEnumNode creates a new *EnumNode. All arguments must be non-nil. While +// it is technically allowed for decls to be nil or empty, the resulting node +// will not be a valid enum, which must have at least one value. +// - keyword: The token corresponding to the "enum" keyword. +// - name: The token corresponding to the enum's name. +// - openBrace: The token corresponding to the "{" rune that starts the body. +// - decls: All declarations inside the enum body. +// - closeBrace: The token corresponding to the "}" rune that ends the body.
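+//
+// As a rough illustration only (enumKw, nameTok, openTok, closeTok, and decls
+// are hypothetical tokens/nodes produced by a lexer, not identifiers defined
+// in this file), a caller could assemble:
+//
+//	enumNode := NewEnumNode(enumKw, nameTok, openTok, decls, closeTok)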
+func NewEnumNode(keyword *KeywordNode, name *IdentNode, openBrace *RuneNode, decls []EnumElement, closeBrace *RuneNode) *EnumNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if openBrace == nil { + panic("openBrace is nil") + } + if closeBrace == nil { + panic("closeBrace is nil") + } + children := make([]Node, 0, 4+len(decls)) + children = append(children, keyword, name, openBrace) + for _, decl := range decls { + children = append(children, decl) + } + children = append(children, closeBrace) + + for _, decl := range decls { + switch decl.(type) { + case *OptionNode, *EnumValueNode, *ReservedNode, *EmptyDeclNode: + default: + panic(fmt.Sprintf("invalid EnumElement type: %T", decl)) + } + } + + return &EnumNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Name: name, + OpenBrace: openBrace, + CloseBrace: closeBrace, + Decls: decls, + } +} + +// EnumElement is an interface implemented by all AST nodes that can +// appear in the body of an enum declaration. +type EnumElement interface { + Node + enumElement() +} + +var _ EnumElement = (*OptionNode)(nil) +var _ EnumElement = (*EnumValueNode)(nil) +var _ EnumElement = (*ReservedNode)(nil) +var _ EnumElement = (*EmptyDeclNode)(nil) + +// EnumValueDeclNode is a placeholder interface for AST nodes that represent +// enum values. This allows NoSourceNode to be used in place of *EnumValueNode +// for some usages. +type EnumValueDeclNode interface { + Node + GetName() Node + GetNumber() Node +} + +var _ EnumValueDeclNode = (*EnumValueNode)(nil) +var _ EnumValueDeclNode = NoSourceNode{} + +// EnumValueNode represents an enum value declaration. Example: +// +// UNSET = 0 [deprecated = true]; +type EnumValueNode struct { + compositeNode + Name *IdentNode + Equals *RuneNode + Number IntValueNode + Options *CompactOptionsNode + Semicolon *RuneNode +} + +func (*EnumValueNode) enumElement() {} + +// NewEnumValueNode creates a new *EnumValueNode. All arguments must be non-nil +// except opts which is only non-nil if the declaration included options. +// - name: The token corresponding to the enum value's name. +// - equals: The token corresponding to the '=' rune after the name. +// - number: The token corresponding to the enum value's number. +// - opts: Optional set of enum value options. +// - semicolon: The token corresponding to the ";" rune that ends the declaration.
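+//
+// Rough illustration only (nameTok, eqTok, numTok, and semiTok are
+// hypothetical lexer tokens): a value with no options could be built as
+//
+//	val := NewEnumValueNode(nameTok, eqTok, numTok, nil, semiTok)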
+func NewEnumValueNode(name *IdentNode, equals *RuneNode, number IntValueNode, opts *CompactOptionsNode, semicolon *RuneNode) *EnumValueNode { + if name == nil { + panic("name is nil") + } + if equals == nil { + panic("equals is nil") + } + if number == nil { + panic("number is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + numChildren := 4 + if opts != nil { + numChildren++ + } + children := make([]Node, 0, numChildren) + children = append(children, name, equals, number) + if opts != nil { + children = append(children, opts) + } + children = append(children, semicolon) + return &EnumValueNode{ + compositeNode: compositeNode{ + children: children, + }, + Name: name, + Equals: equals, + Number: number, + Options: opts, + Semicolon: semicolon, + } +} + +func (e *EnumValueNode) GetName() Node { + return e.Name +} + +func (e *EnumValueNode) GetNumber() Node { + return e.Number +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/field.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/field.go new file mode 100644 index 00000000000..b5183af8b67 --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/field.go @@ -0,0 +1,608 @@ +package ast + +import "fmt" + +// FieldDeclNode is a node in the AST that defines a field. This includes +// normal message fields as well as extensions. There are multiple types +// of AST nodes that declare fields: +// - *FieldNode +// - *GroupNode +// - *MapFieldNode +// This also allows NoSourceNode to be used in place of one of the above +// for some usages. +type FieldDeclNode interface { + Node + FieldLabel() Node + FieldName() Node + FieldType() Node + FieldTag() Node + FieldExtendee() Node + GetGroupKeyword() Node + GetOptions() *CompactOptionsNode +} + +var _ FieldDeclNode = (*FieldNode)(nil) +var _ FieldDeclNode = (*GroupNode)(nil) +var _ FieldDeclNode = (*MapFieldNode)(nil) +var _ FieldDeclNode = (*SyntheticMapField)(nil) +var _ FieldDeclNode = NoSourceNode{} + +// FieldNode represents a normal field declaration (not groups or maps). It +// can represent extension fields as well as non-extension fields (both inside +// of messages and inside of one-ofs). Example: +// +// optional string foo = 1; +type FieldNode struct { + compositeNode + Label FieldLabel + FldType IdentValueNode + Name *IdentNode + Equals *RuneNode + Tag *UintLiteralNode + Options *CompactOptionsNode + Semicolon *RuneNode + + // This is an up-link to the containing *ExtendNode for fields + // that are defined inside of "extend" blocks. + Extendee *ExtendNode +} + +func (*FieldNode) msgElement() {} +func (*FieldNode) oneOfElement() {} +func (*FieldNode) extendElement() {} + +// NewFieldNode creates a new *FieldNode. The label and options arguments may be +// nil but the others must be non-nil. +// - label: The token corresponding to the label keyword if present ("optional", +// "required", or "repeated"). +// - fieldType: The token corresponding to the field's type. +// - name: The token corresponding to the field's name. +// - equals: The token corresponding to the '=' rune after the name. +// - tag: The token corresponding to the field's tag number. +// - opts: Optional set of field options. +// - semicolon: The token corresponding to the ";" rune that ends the declaration. 
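+//
+// Rough illustration only (typeTok, nameTok, eqTok, tagTok, and semiTok are
+// hypothetical lexer tokens): a proto3-style field with no label and no
+// options could be built as
+//
+//	fld := NewFieldNode(nil, typeTok, nameTok, eqTok, tagTok, nil, semiTok)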
+func NewFieldNode(label *KeywordNode, fieldType IdentValueNode, name *IdentNode, equals *RuneNode, tag *UintLiteralNode, opts *CompactOptionsNode, semicolon *RuneNode) *FieldNode { + if fieldType == nil { + panic("fieldType is nil") + } + if name == nil { + panic("name is nil") + } + if equals == nil { + panic("equals is nil") + } + if tag == nil { + panic("tag is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + numChildren := 5 + if label != nil { + numChildren++ + } + if opts != nil { + numChildren++ + } + children := make([]Node, 0, numChildren) + if label != nil { + children = append(children, label) + } + children = append(children, fieldType, name, equals, tag) + if opts != nil { + children = append(children, opts) + } + children = append(children, semicolon) + + return &FieldNode{ + compositeNode: compositeNode{ + children: children, + }, + Label: newFieldLabel(label), + FldType: fieldType, + Name: name, + Equals: equals, + Tag: tag, + Options: opts, + Semicolon: semicolon, + } +} + +func (n *FieldNode) FieldLabel() Node { + // proto3 fields and fields inside one-ofs will not have a label and we need + // this check in order to return a nil node -- otherwise we'd return a + // non-nil node that has a nil pointer value in it :/ + if n.Label.KeywordNode == nil { + return nil + } + return n.Label.KeywordNode +} + +func (n *FieldNode) FieldName() Node { + return n.Name +} + +func (n *FieldNode) FieldType() Node { + return n.FldType +} + +func (n *FieldNode) FieldTag() Node { + return n.Tag +} + +func (n *FieldNode) FieldExtendee() Node { + if n.Extendee != nil { + return n.Extendee.Extendee + } + return nil +} + +func (n *FieldNode) GetGroupKeyword() Node { + return nil +} + +func (n *FieldNode) GetOptions() *CompactOptionsNode { + return n.Options +} + +// FieldLabel represents the label of a field, which indicates its cardinality +// (i.e. whether it is optional, required, or repeated). +type FieldLabel struct { + *KeywordNode + Repeated bool + Required bool +} + +func newFieldLabel(lbl *KeywordNode) FieldLabel { + repeated, required := false, false + if lbl != nil { + repeated = lbl.Val == "repeated" + required = lbl.Val == "required" + } + return FieldLabel{ + KeywordNode: lbl, + Repeated: repeated, + Required: required, + } +} + +// IsPresent returns true if a label keyword was present in the declaration +// and false if it was absent. +func (f *FieldLabel) IsPresent() bool { + return f.KeywordNode != nil +} + +// GroupNode represents a group declaration, which doubles as a field and inline +// message declaration. It can represent extension fields as well as +// non-extension fields (both inside of messages and inside of one-ofs). +// Example: +// +// optional group Key = 4 { +// optional uint64 id = 1; +// optional string name = 2; +// } +type GroupNode struct { + compositeNode + Label FieldLabel + Keyword *KeywordNode + Name *IdentNode + Equals *RuneNode + Tag *UintLiteralNode + Options *CompactOptionsNode + MessageBody + + // This is an up-link to the containing *ExtendNode for groups + // that are defined inside of "extend" blocks. + Extendee *ExtendNode +} + +func (*GroupNode) msgElement() {} +func (*GroupNode) oneOfElement() {} +func (*GroupNode) extendElement() {} + +// NewGroupNode creates a new *GroupNode. The label and options arguments may be +// nil but the others must be non-nil. +// - label: The token corresponding to the label keyword if present ("optional", +// "required", or "repeated"). 
+// - keyword: The token corresponding to the "group" keyword. +// - name: The token corresponding to the field's name. +// - equals: The token corresponding to the '=' rune after the name. +// - tag: The token corresponding to the field's tag number. +// - opts: Optional set of field options. +// - openBrace: The token corresponding to the "{" rune that starts the body. +// - decls: All declarations inside the group body. +// - closeBrace: The token corresponding to the "}" rune that ends the body. +func NewGroupNode(label *KeywordNode, keyword *KeywordNode, name *IdentNode, equals *RuneNode, tag *UintLiteralNode, opts *CompactOptionsNode, openBrace *RuneNode, decls []MessageElement, closeBrace *RuneNode) *GroupNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if equals == nil { + panic("equals is nil") + } + if tag == nil { + panic("tag is nil") + } + if openBrace == nil { + panic("openBrace is nil") + } + if closeBrace == nil { + panic("closeBrace is nil") + } + numChildren := 6 + len(decls) + if label != nil { + numChildren++ + } + if opts != nil { + numChildren++ + } + children := make([]Node, 0, numChildren) + if label != nil { + children = append(children, label) + } + children = append(children, keyword, name, equals, tag) + if opts != nil { + children = append(children, opts) + } + children = append(children, openBrace) + for _, decl := range decls { + children = append(children, decl) + } + children = append(children, closeBrace) + + ret := &GroupNode{ + compositeNode: compositeNode{ + children: children, + }, + Label: newFieldLabel(label), + Keyword: keyword, + Name: name, + Equals: equals, + Tag: tag, + Options: opts, + } + populateMessageBody(&ret.MessageBody, openBrace, decls, closeBrace) + return ret +} + +func (n *GroupNode) FieldLabel() Node { + if n.Label.KeywordNode == nil { + // return nil interface to indicate absence, not a typed nil + return nil + } + return n.Label.KeywordNode +} + +func (n *GroupNode) FieldName() Node { + return n.Name +} + +func (n *GroupNode) FieldType() Node { + return n.Keyword +} + +func (n *GroupNode) FieldTag() Node { + return n.Tag +} + +func (n *GroupNode) FieldExtendee() Node { + if n.Extendee != nil { + return n.Extendee.Extendee + } + return nil +} + +func (n *GroupNode) GetGroupKeyword() Node { + return n.Keyword +} + +func (n *GroupNode) GetOptions() *CompactOptionsNode { + return n.Options +} + +func (n *GroupNode) MessageName() Node { + return n.Name +} + +// OneOfNode represents a one-of declaration. Example: +// +// oneof query { +// string by_name = 2; +// Type by_type = 3; +// Address by_address = 4; +// Labels by_label = 5; +// } +type OneOfNode struct { + compositeNode + Keyword *KeywordNode + Name *IdentNode + OpenBrace *RuneNode + Decls []OneOfElement + CloseBrace *RuneNode +} + +func (*OneOfNode) msgElement() {} + +// NewOneOfNode creates a new *OneOfNode. All arguments must be non-nil. While +// it is technically allowed for decls to be nil or empty, the resulting node +// will not be a valid oneof, which must have at least one field. +// - keyword: The token corresponding to the "oneof" keyword. +// - name: The token corresponding to the oneof's name. +// - openBrace: The token corresponding to the "{" rune that starts the body. +// - decls: All declarations inside the oneof body. +// - closeBrace: The token corresponding to the "}" rune that ends the body.
+func NewOneOfNode(keyword *KeywordNode, name *IdentNode, openBrace *RuneNode, decls []OneOfElement, closeBrace *RuneNode) *OneOfNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if openBrace == nil { + panic("openBrace is nil") + } + if closeBrace == nil { + panic("closeBrace is nil") + } + children := make([]Node, 0, 4+len(decls)) + children = append(children, keyword, name, openBrace) + for _, decl := range decls { + children = append(children, decl) + } + children = append(children, closeBrace) + + for _, decl := range decls { + switch decl := decl.(type) { + case *OptionNode, *FieldNode, *GroupNode, *EmptyDeclNode: + default: + panic(fmt.Sprintf("invalid OneOfElement type: %T", decl)) + } + } + + return &OneOfNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Name: name, + OpenBrace: openBrace, + Decls: decls, + CloseBrace: closeBrace, + } +} + +// OneOfElement is an interface implemented by all AST nodes that can +// appear in the body of a oneof declaration. +type OneOfElement interface { + Node + oneOfElement() +} + +var _ OneOfElement = (*OptionNode)(nil) +var _ OneOfElement = (*FieldNode)(nil) +var _ OneOfElement = (*GroupNode)(nil) +var _ OneOfElement = (*EmptyDeclNode)(nil) + +// MapTypeNode represents the type declaration for a map field. It defines +// both the key and value types for the map. Example: +// +// map<string, string> +type MapTypeNode struct { + compositeNode + Keyword *KeywordNode + OpenAngle *RuneNode + KeyType *IdentNode + Comma *RuneNode + ValueType IdentValueNode + CloseAngle *RuneNode +} + +// NewMapTypeNode creates a new *MapTypeNode. All arguments must be non-nil. +// - keyword: The token corresponding to the "map" keyword. +// - openAngle: The token corresponding to the "<" rune after the keyword. +// - keyType: The token corresponding to the key type for the map. +// - comma: The token corresponding to the "," rune between key and value types. +// - valType: The token corresponding to the value type for the map. +// - closeAngle: The token corresponding to the ">" rune that ends the declaration. +func NewMapTypeNode(keyword *KeywordNode, openAngle *RuneNode, keyType *IdentNode, comma *RuneNode, valType IdentValueNode, closeAngle *RuneNode) *MapTypeNode { + if keyword == nil { + panic("keyword is nil") + } + if openAngle == nil { + panic("openAngle is nil") + } + if keyType == nil { + panic("keyType is nil") + } + if comma == nil { + panic("comma is nil") + } + if valType == nil { + panic("valType is nil") + } + if closeAngle == nil { + panic("closeAngle is nil") + } + children := []Node{keyword, openAngle, keyType, comma, valType, closeAngle} + return &MapTypeNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + OpenAngle: openAngle, + KeyType: keyType, + Comma: comma, + ValueType: valType, + CloseAngle: closeAngle, + } +} + +// MapFieldNode represents a map field declaration. Example: +// +// map<string, string> replacements = 3 [deprecated = true]; +type MapFieldNode struct { + compositeNode + MapType *MapTypeNode + Name *IdentNode + Equals *RuneNode + Tag *UintLiteralNode + Options *CompactOptionsNode + Semicolon *RuneNode +} + +func (*MapFieldNode) msgElement() {} + +// NewMapFieldNode creates a new *MapFieldNode. All arguments must be non-nil +// except opts, which may be nil. +// - mapType: The token corresponding to the map type. +// - name: The token corresponding to the field's name. +// - equals: The token corresponding to the '=' rune after the name.
+// - tag: The token corresponding to the field's tag number. +// - opts: Optional set of field options. +// - semicolon: The token corresponding to the ";" rune that ends the declaration. +func NewMapFieldNode(mapType *MapTypeNode, name *IdentNode, equals *RuneNode, tag *UintLiteralNode, opts *CompactOptionsNode, semicolon *RuneNode) *MapFieldNode { + if mapType == nil { + panic("mapType is nil") + } + if name == nil { + panic("name is nil") + } + if equals == nil { + panic("equals is nil") + } + if tag == nil { + panic("tag is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + numChildren := 5 + if opts != nil { + numChildren++ + } + children := make([]Node, 0, numChildren) + children = append(children, mapType, name, equals, tag) + if opts != nil { + children = append(children, opts) + } + children = append(children, semicolon) + + return &MapFieldNode{ + compositeNode: compositeNode{ + children: children, + }, + MapType: mapType, + Name: name, + Equals: equals, + Tag: tag, + Options: opts, + Semicolon: semicolon, + } +} + +func (n *MapFieldNode) FieldLabel() Node { + return nil +} + +func (n *MapFieldNode) FieldName() Node { + return n.Name +} + +func (n *MapFieldNode) FieldType() Node { + return n.MapType +} + +func (n *MapFieldNode) FieldTag() Node { + return n.Tag +} + +func (n *MapFieldNode) FieldExtendee() Node { + return nil +} + +func (n *MapFieldNode) GetGroupKeyword() Node { + return nil +} + +func (n *MapFieldNode) GetOptions() *CompactOptionsNode { + return n.Options +} + +func (n *MapFieldNode) MessageName() Node { + return n.Name +} + +func (n *MapFieldNode) KeyField() *SyntheticMapField { + return NewSyntheticMapField(n.MapType.KeyType, 1) +} + +func (n *MapFieldNode) ValueField() *SyntheticMapField { + return NewSyntheticMapField(n.MapType.ValueType, 2) +} + +// SyntheticMapField is not an actual node in the AST but a synthetic node +// that implements FieldDeclNode. These are used to represent the implicit +// field declarations of the "key" and "value" fields in a map entry. +type SyntheticMapField struct { + Ident IdentValueNode + Tag *UintLiteralNode +} + +// NewSyntheticMapField creates a new *SyntheticMapField for the given +// identifier (either a key or value type in a map declaration) and tag +// number (1 for key, 2 for value). 
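+//
+// Rough illustration only (mapFld is a hypothetical *MapFieldNode): the same
+// synthetic nodes can also be obtained from a parsed map field, e.g.
+//
+//	keyFld := mapFld.KeyField()   // NewSyntheticMapField(mapFld.MapType.KeyType, 1)
+//	valFld := mapFld.ValueField() // NewSyntheticMapField(mapFld.MapType.ValueType, 2)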
+func NewSyntheticMapField(ident IdentValueNode, tagNum uint64) *SyntheticMapField { + tag := &UintLiteralNode{ + terminalNode: terminalNode{ + posRange: PosRange{Start: *ident.Start(), End: *ident.End()}, + }, + Val: tagNum, + } + return &SyntheticMapField{Ident: ident, Tag: tag} +} + +func (n *SyntheticMapField) Start() *SourcePos { + return n.Ident.Start() +} + +func (n *SyntheticMapField) End() *SourcePos { + return n.Ident.End() +} + +func (n *SyntheticMapField) LeadingComments() []Comment { + return nil +} + +func (n *SyntheticMapField) TrailingComments() []Comment { + return nil +} + +func (n *SyntheticMapField) FieldLabel() Node { + return n.Ident +} + +func (n *SyntheticMapField) FieldName() Node { + return n.Ident +} + +func (n *SyntheticMapField) FieldType() Node { + return n.Ident +} + +func (n *SyntheticMapField) FieldTag() Node { + return n.Tag +} + +func (n *SyntheticMapField) FieldExtendee() Node { + return nil +} + +func (n *SyntheticMapField) GetGroupKeyword() Node { + return nil +} + +func (n *SyntheticMapField) GetOptions() *CompactOptionsNode { + return nil +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/file.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/file.go new file mode 100644 index 00000000000..ed151b1b2e5 --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/file.go @@ -0,0 +1,234 @@ +package ast + +import "fmt" + +// FileDeclNode is a placeholder interface for AST nodes that represent files. +// This allows NoSourceNode to be used in place of *FileNode for some usages. +type FileDeclNode interface { + Node + GetSyntax() Node +} + +var _ FileDeclNode = (*FileNode)(nil) +var _ FileDeclNode = NoSourceNode{} + +// FileNode is the root of the AST hierarchy. It represents an entire +// protobuf source file. +type FileNode struct { + compositeNode + Syntax *SyntaxNode // nil if file has no syntax declaration + Decls []FileElement + + // Any comments that follow the last token in the file. + FinalComments []Comment + // Any whitespace at the end of the file (after the last token or + // last comment in the file). + FinalWhitespace string +} + +// NewFileNode creates a new *FileNode. The syntax parameter is optional. If it +// is absent, it means the file had no syntax declaration. +// +// This function panics if the concrete type of any element of decls is not +// from this package. +func NewFileNode(syntax *SyntaxNode, decls []FileElement) *FileNode { + numChildren := len(decls) + if syntax != nil { + numChildren++ + } + children := make([]Node, 0, numChildren) + if syntax != nil { + children = append(children, syntax) + } + for _, decl := range decls { + children = append(children, decl) + } + + for _, decl := range decls { + switch decl := decl.(type) { + case *PackageNode, *ImportNode, *OptionNode, *MessageNode, + *EnumNode, *ExtendNode, *ServiceNode, *EmptyDeclNode: + default: + panic(fmt.Sprintf("invalid FileElement type: %T", decl)) + } + } + + return &FileNode{ + compositeNode: compositeNode{ + children: children, + }, + Syntax: syntax, + Decls: decls, + } +} + +func NewEmptyFileNode(filename string) *FileNode { + return &FileNode{ + compositeNode: compositeNode{ + children: []Node{NewNoSourceNode(filename)}, + }, + } +} + +func (f *FileNode) GetSyntax() Node { + return f.Syntax +} + +// FileElement is an interface implemented by all AST nodes that are +// allowed as top-level declarations in the file.
+type FileElement interface { + Node + fileElement() +} + +var _ FileElement = (*ImportNode)(nil) +var _ FileElement = (*PackageNode)(nil) +var _ FileElement = (*OptionNode)(nil) +var _ FileElement = (*MessageNode)(nil) +var _ FileElement = (*EnumNode)(nil) +var _ FileElement = (*ExtendNode)(nil) +var _ FileElement = (*ServiceNode)(nil) +var _ FileElement = (*EmptyDeclNode)(nil) + +// SyntaxNode represents a syntax declaration, which if present must be +// the first non-comment content. Example: +// +// syntax = "proto2"; +// +// Files that don't have a syntax node are assumed to use proto2 syntax. +type SyntaxNode struct { + compositeNode + Keyword *KeywordNode + Equals *RuneNode + Syntax StringValueNode + Semicolon *RuneNode +} + +// NewSyntaxNode creates a new *SyntaxNode. All four arguments must be non-nil: +// - keyword: The token corresponding to the "syntax" keyword. +// - equals: The token corresponding to the "=" rune. +// - syntax: The actual syntax value, e.g. "proto2" or "proto3". +// - semicolon: The token corresponding to the ";" rune that ends the declaration. +func NewSyntaxNode(keyword *KeywordNode, equals *RuneNode, syntax StringValueNode, semicolon *RuneNode) *SyntaxNode { + if keyword == nil { + panic("keyword is nil") + } + if equals == nil { + panic("equals is nil") + } + if syntax == nil { + panic("syntax is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + children := []Node{keyword, equals, syntax, semicolon} + return &SyntaxNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Equals: equals, + Syntax: syntax, + Semicolon: semicolon, + } +} + +// ImportNode represents an import statement. Example: +// +// import "google/protobuf/empty.proto"; +type ImportNode struct { + compositeNode + Keyword *KeywordNode + // Optional; if present indicates this is a public import + Public *KeywordNode + // Optional; if present indicates this is a weak import + Weak *KeywordNode + Name StringValueNode + Semicolon *RuneNode +} + +// NewImportNode creates a new *ImportNode. The public and weak arguments are optional +// and only one or the other (or neither) may be specified, not both. When public is +// non-nil, it indicates the "public" keyword in the import statement and means this is +// a public import. When weak is non-nil, it indicates the "weak" keyword in the import +// statement and means this is a weak import. When both are nil, this is a normal import. +// The other arguments must be non-nil: +// - keyword: The token corresponding to the "import" keyword. +// - public: The token corresponding to the optional "public" keyword. +// - weak: The token corresponding to the optional "weak" keyword. +// - name: The actual imported file name. +// - semicolon: The token corresponding to the ";" rune that ends the declaration. 
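+//
+// Rough illustration only (importKw, publicKw, nameLit, and semiTok are
+// hypothetical lexer tokens): a public import could be built as
+//
+//	imp := NewImportNode(importKw, publicKw, nil, nameLit, semiTok)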
+func NewImportNode(keyword *KeywordNode, public *KeywordNode, weak *KeywordNode, name StringValueNode, semicolon *RuneNode) *ImportNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + numChildren := 3 + if public != nil || weak != nil { + numChildren++ + } + children := make([]Node, 0, numChildren) + children = append(children, keyword) + if public != nil { + children = append(children, public) + } else if weak != nil { + children = append(children, weak) + } + children = append(children, name, semicolon) + + return &ImportNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Public: public, + Weak: weak, + Name: name, + Semicolon: semicolon, + } +} + +func (*ImportNode) fileElement() {} + +// PackageNode represents a package declaration. Example: +// +// package foobar.com; +type PackageNode struct { + compositeNode + Keyword *KeywordNode + Name IdentValueNode + Semicolon *RuneNode +} + +func (*PackageNode) fileElement() {} + +// NewPackageNode creates a new *PackageNode. All three arguments must be non-nil: +// - keyword: The token corresponding to the "package" keyword. +// - name: The package name declared for the file. +// - semicolon: The token corresponding to the ";" rune that ends the declaration. +func NewPackageNode(keyword *KeywordNode, name IdentValueNode, semicolon *RuneNode) *PackageNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + children := []Node{keyword, name, semicolon} + return &PackageNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Name: name, + Semicolon: semicolon, + } +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/identifiers.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/identifiers.go new file mode 100644 index 00000000000..aa62add5ade --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/identifiers.go @@ -0,0 +1,134 @@ +package ast + +import ( + "fmt" + "strings" +) + +// Identifier is a possibly-qualified name. This is used to distinguish +// ValueNode values that are references/identifiers vs. those that are +// string literals. +type Identifier string + +// IdentValueNode is an AST node that represents an identifier. +type IdentValueNode interface { + ValueNode + AsIdentifier() Identifier +} + +var _ IdentValueNode = (*IdentNode)(nil) +var _ IdentValueNode = (*CompoundIdentNode)(nil) + +// IdentNode represents a simple, unqualified identifier. These are used to name +// elements declared in a protobuf file or to refer to elements. Example: +// +// foobar +type IdentNode struct { + terminalNode + Val string +} + +// NewIdentNode creates a new *IdentNode. The given val is the identifier text. +func NewIdentNode(val string, info TokenInfo) *IdentNode { + return &IdentNode{ + terminalNode: info.asTerminalNode(), + Val: val, + } +} + +func (n *IdentNode) Value() interface{} { + return n.AsIdentifier() +} + +func (n *IdentNode) AsIdentifier() Identifier { + return Identifier(n.Val) +} + +// ToKeyword is used to convert identifiers to keywords. Since keywords are not +// reserved in the protobuf language, they are initially lexed as identifiers +// and then converted to keywords based on context. 
+func (n *IdentNode) ToKeyword() *KeywordNode { + return (*KeywordNode)(n) +} + +// CompoundIdentNode represents a qualified identifier. A qualified identifier +// has at least one dot and possibly multiple identifier names (all separated by +// dots). If the identifier has a leading dot, then it is a *fully* qualified +// identifier. Example: +// +// .com.foobar.Baz +type CompoundIdentNode struct { + compositeNode + // Optional leading dot, indicating that the identifier is fully qualified. + LeadingDot *RuneNode + Components []*IdentNode + // Dots[0] is the dot after Components[0]. The length of Dots is always + // one less than the length of Components. + Dots []*RuneNode + // The text value of the identifier, with all components and dots + // concatenated. + Val string +} + +// NewCompoundIdentNode creates a *CompoundIdentNode. The leadingDot may be nil. +// The dots arg must have a length that is one less than the length of +// components. The components arg must not be empty. +func NewCompoundIdentNode(leadingDot *RuneNode, components []*IdentNode, dots []*RuneNode) *CompoundIdentNode { + if len(components) == 0 { + panic("must have at least one component") + } + if len(dots) != len(components)-1 { + panic(fmt.Sprintf("%d components requires %d dots, not %d", len(components), len(components)-1, len(dots))) + } + numChildren := len(components)*2 - 1 + if leadingDot != nil { + numChildren++ + } + children := make([]Node, 0, numChildren) + var b strings.Builder + if leadingDot != nil { + children = append(children, leadingDot) + b.WriteRune(leadingDot.Rune) + } + for i, comp := range components { + if i > 0 { + dot := dots[i-1] + children = append(children, dot) + b.WriteRune(dot.Rune) + } + children = append(children, comp) + b.WriteString(comp.Val) + } + return &CompoundIdentNode{ + compositeNode: compositeNode{ + children: children, + }, + LeadingDot: leadingDot, + Components: components, + Dots: dots, + Val: b.String(), + } +} + +func (n *CompoundIdentNode) Value() interface{} { + return n.AsIdentifier() +} + +func (n *CompoundIdentNode) AsIdentifier() Identifier { + return Identifier(n.Val) +} + +// KeywordNode is an AST node that represents a keyword. Keywords are +// like identifiers, but they have special meaning in particular contexts. +// Example: +// +// message +type KeywordNode IdentNode + +// NewKeywordNode creates a new *KeywordNode. The given val is the keyword. +func NewKeywordNode(val string, info TokenInfo) *KeywordNode { + return &KeywordNode{ + terminalNode: info.asTerminalNode(), + Val: val, + } +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/message.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/message.go new file mode 100644 index 00000000000..80651b66d6e --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/message.go @@ -0,0 +1,198 @@ +package ast + +import "fmt" + +// MessageDeclNode is a node in the AST that defines a message type. This +// includes normal message fields as well as implicit messages: +// - *MessageNode +// - *GroupNode (the group is a field and inline message type) +// - *MapFieldNode (map fields implicitly define a MapEntry message type) +// This also allows NoSourceNode to be used in place of one of the above +// for some usages. 
+type MessageDeclNode interface { + Node + MessageName() Node +} + +var _ MessageDeclNode = (*MessageNode)(nil) +var _ MessageDeclNode = (*GroupNode)(nil) +var _ MessageDeclNode = (*MapFieldNode)(nil) +var _ MessageDeclNode = NoSourceNode{} + +// MessageNode represents a message declaration. Example: +// +// message Foo { +// string name = 1; +// repeated string labels = 2; +// bytes extra = 3; +// } +type MessageNode struct { + compositeNode + Keyword *KeywordNode + Name *IdentNode + MessageBody +} + +func (*MessageNode) fileElement() {} +func (*MessageNode) msgElement() {} + +// NewMessageNode creates a new *MessageNode. All arguments must be non-nil. +// - keyword: The token corresponding to the "message" keyword. +// - name: The token corresponding to the field's name. +// - openBrace: The token corresponding to the "{" rune that starts the body. +// - decls: All declarations inside the message body. +// - closeBrace: The token corresponding to the "}" rune that ends the body. +func NewMessageNode(keyword *KeywordNode, name *IdentNode, openBrace *RuneNode, decls []MessageElement, closeBrace *RuneNode) *MessageNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if openBrace == nil { + panic("openBrace is nil") + } + if closeBrace == nil { + panic("closeBrace is nil") + } + children := make([]Node, 0, 4+len(decls)) + children = append(children, keyword, name, openBrace) + for _, decl := range decls { + children = append(children, decl) + } + children = append(children, closeBrace) + + ret := &MessageNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Name: name, + } + populateMessageBody(&ret.MessageBody, openBrace, decls, closeBrace) + return ret +} + +func (n *MessageNode) MessageName() Node { + return n.Name +} + +// MessageBody represents the body of a message. It is used by both +// MessageNodes and GroupNodes. +type MessageBody struct { + OpenBrace *RuneNode + Decls []MessageElement + CloseBrace *RuneNode +} + +func populateMessageBody(m *MessageBody, openBrace *RuneNode, decls []MessageElement, closeBrace *RuneNode) { + m.OpenBrace = openBrace + m.Decls = decls + for _, decl := range decls { + switch decl.(type) { + case *OptionNode, *FieldNode, *MapFieldNode, *GroupNode, *OneOfNode, + *MessageNode, *EnumNode, *ExtendNode, *ExtensionRangeNode, + *ReservedNode, *EmptyDeclNode: + default: + panic(fmt.Sprintf("invalid MessageElement type: %T", decl)) + } + } + m.CloseBrace = closeBrace +} + +// MessageElement is an interface implemented by all AST nodes that can +// appear in a message body. +type MessageElement interface { + Node + msgElement() +} + +var _ MessageElement = (*OptionNode)(nil) +var _ MessageElement = (*FieldNode)(nil) +var _ MessageElement = (*MapFieldNode)(nil) +var _ MessageElement = (*OneOfNode)(nil) +var _ MessageElement = (*GroupNode)(nil) +var _ MessageElement = (*MessageNode)(nil) +var _ MessageElement = (*EnumNode)(nil) +var _ MessageElement = (*ExtendNode)(nil) +var _ MessageElement = (*ExtensionRangeNode)(nil) +var _ MessageElement = (*ReservedNode)(nil) +var _ MessageElement = (*EmptyDeclNode)(nil) + +// ExtendNode represents a declaration of extension fields. 
Example: +// +// extend google.protobuf.FieldOptions { +// bool redacted = 33333; +// } +type ExtendNode struct { + compositeNode + Keyword *KeywordNode + Extendee IdentValueNode + OpenBrace *RuneNode + Decls []ExtendElement + CloseBrace *RuneNode +} + +func (*ExtendNode) fileElement() {} +func (*ExtendNode) msgElement() {} + +// NewExtendNode creates a new *ExtendNode. All arguments must be non-nil. +// - keyword: The token corresponding to the "extend" keyword. +// - extendee: The token corresponding to the name of the extended message. +// - openBrace: The token corresponding to the "{" rune that starts the body. +// - decls: All declarations inside the message body. +// - closeBrace: The token corresponding to the "}" rune that ends the body. +func NewExtendNode(keyword *KeywordNode, extendee IdentValueNode, openBrace *RuneNode, decls []ExtendElement, closeBrace *RuneNode) *ExtendNode { + if keyword == nil { + panic("keyword is nil") + } + if extendee == nil { + panic("extendee is nil") + } + if openBrace == nil { + panic("openBrace is nil") + } + if closeBrace == nil { + panic("closeBrace is nil") + } + children := make([]Node, 0, 4+len(decls)) + children = append(children, keyword, extendee, openBrace) + for _, decl := range decls { + children = append(children, decl) + } + children = append(children, closeBrace) + + ret := &ExtendNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Extendee: extendee, + OpenBrace: openBrace, + Decls: decls, + CloseBrace: closeBrace, + } + for _, decl := range decls { + switch decl := decl.(type) { + case *FieldNode: + decl.Extendee = ret + case *GroupNode: + decl.Extendee = ret + case *EmptyDeclNode: + default: + panic(fmt.Sprintf("invalid ExtendElement type: %T", decl)) + } + } + return ret +} + +// ExtendElement is an interface implemented by all AST nodes that can +// appear in the body of an extends declaration. +type ExtendElement interface { + Node + extendElement() +} + +var _ ExtendElement = (*FieldNode)(nil) +var _ ExtendElement = (*GroupNode)(nil) +var _ ExtendElement = (*EmptyDeclNode)(nil) diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/no_source.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/no_source.go new file mode 100644 index 00000000000..44e02b1013b --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/no_source.go @@ -0,0 +1,103 @@ +package ast + +// UnknownPos is a placeholder position when only the source file +// name is known. +func UnknownPos(filename string) *SourcePos { + return &SourcePos{Filename: filename} +} + +// NoSourceNode is a placeholder AST node that implements numerous +// interfaces in this package. It can be used to represent an AST +// element for a file whose source is not available. +type NoSourceNode struct { + pos *SourcePos +} + +// NewNoSourceNode creates a new NoSourceNode for the given filename. 
+func NewNoSourceNode(filename string) NoSourceNode { + return NoSourceNode{pos: UnknownPos(filename)} +} + +func (n NoSourceNode) Start() *SourcePos { + return n.pos +} + +func (n NoSourceNode) End() *SourcePos { + return n.pos +} + +func (n NoSourceNode) LeadingComments() []Comment { + return nil +} + +func (n NoSourceNode) TrailingComments() []Comment { + return nil +} + +func (n NoSourceNode) GetSyntax() Node { + return n +} + +func (n NoSourceNode) GetName() Node { + return n +} + +func (n NoSourceNode) GetValue() ValueNode { + return n +} + +func (n NoSourceNode) FieldLabel() Node { + return n +} + +func (n NoSourceNode) FieldName() Node { + return n +} + +func (n NoSourceNode) FieldType() Node { + return n +} + +func (n NoSourceNode) FieldTag() Node { + return n +} + +func (n NoSourceNode) FieldExtendee() Node { + return n +} + +func (n NoSourceNode) GetGroupKeyword() Node { + return n +} + +func (n NoSourceNode) GetOptions() *CompactOptionsNode { + return nil +} + +func (n NoSourceNode) RangeStart() Node { + return n +} + +func (n NoSourceNode) RangeEnd() Node { + return n +} + +func (n NoSourceNode) GetNumber() Node { + return n +} + +func (n NoSourceNode) MessageName() Node { + return n +} + +func (n NoSourceNode) GetInputType() Node { + return n +} + +func (n NoSourceNode) GetOutputType() Node { + return n +} + +func (n NoSourceNode) Value() interface{} { + return nil +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/node.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/node.go new file mode 100644 index 00000000000..a2a8a3b2c39 --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/node.go @@ -0,0 +1,200 @@ +package ast + +// Node is the interface implemented by all nodes in the AST. It +// provides information about the span of this AST node in terms +// of location in the source file. It also provides information +// about all prior comments (attached as leading comments) and +// optional subsequent comments (attached as trailing comments). +type Node interface { + Start() *SourcePos + End() *SourcePos + LeadingComments() []Comment + TrailingComments() []Comment +} + +// TerminalNode represents a leaf in the AST. These represent +// the tokens/lexemes in the protobuf language. Comments and +// whitespace are accumulated by the lexer and associated with +// the following lexed token. +type TerminalNode interface { + Node + // PopLeadingComment removes the first leading comment from this + // token and returns it. If the node has no leading comments then + // this method will panic. + PopLeadingComment() Comment + // PushTrailingComment appends the given comment to the token's + // trailing comments. + PushTrailingComment(Comment) + // LeadingWhitespace returns any whitespace between the prior comment + // (last leading comment), if any, or prior lexed token and this token. + LeadingWhitespace() string + // RawText returns the raw text of the token as read from the source. + RawText() string +} + +var _ TerminalNode = (*StringLiteralNode)(nil) +var _ TerminalNode = (*UintLiteralNode)(nil) +var _ TerminalNode = (*FloatLiteralNode)(nil) +var _ TerminalNode = (*IdentNode)(nil) +var _ TerminalNode = (*BoolLiteralNode)(nil) +var _ TerminalNode = (*SpecialFloatLiteralNode)(nil) +var _ TerminalNode = (*KeywordNode)(nil) +var _ TerminalNode = (*RuneNode)(nil) + +// TokenInfo represents state accumulated by the lexer to associated with a +// token (aka terminal node). 
+type TokenInfo struct { + // The location of the token in the source file. + PosRange + // The raw text of the token. + RawText string + // Any comments encountered preceding this token. + LeadingComments []Comment + // Any leading whitespace immediately preceding this token. + LeadingWhitespace string + // Any trailing comments following this token. This is usually + // empty as tokens are created by the lexer immediately and + // trailing comments are accounted for afterwards, added using + // the node's PushTrailingComment method. + TrailingComments []Comment +} + +func (t *TokenInfo) asTerminalNode() terminalNode { + return terminalNode{ + posRange: t.PosRange, + leadingComments: t.LeadingComments, + leadingWhitespace: t.LeadingWhitespace, + trailingComments: t.TrailingComments, + raw: t.RawText, + } +} + +// CompositeNode represents any non-terminal node in the tree. These +// are interior or root nodes and have child nodes. +type CompositeNode interface { + Node + // All AST nodes that are immediate children of this one. + Children() []Node +} + +// terminalNode contains book-keeping shared by all TerminalNode +// implementations. It is embedded in all such node types in this +// package. It provides the implementation of the TerminalNode +// interface. +type terminalNode struct { + posRange PosRange + leadingComments []Comment + leadingWhitespace string + trailingComments []Comment + raw string +} + +func (n *terminalNode) Start() *SourcePos { + return &n.posRange.Start +} + +func (n *terminalNode) End() *SourcePos { + return &n.posRange.End +} + +func (n *terminalNode) LeadingComments() []Comment { + return n.leadingComments +} + +func (n *terminalNode) TrailingComments() []Comment { + return n.trailingComments +} + +func (n *terminalNode) PopLeadingComment() Comment { + c := n.leadingComments[0] + n.leadingComments = n.leadingComments[1:] + return c +} + +func (n *terminalNode) PushTrailingComment(c Comment) { + n.trailingComments = append(n.trailingComments, c) +} + +func (n *terminalNode) LeadingWhitespace() string { + return n.leadingWhitespace +} + +func (n *terminalNode) RawText() string { + return n.raw +} + +// compositeNode contains book-keeping shared by all CompositeNode +// implementations. It is embedded in all such node types in this +// package. It provides the implementation of the CompositeNode +// interface. +type compositeNode struct { + children []Node +} + +func (n *compositeNode) Children() []Node { + return n.children +} + +func (n *compositeNode) Start() *SourcePos { + return n.children[0].Start() +} + +func (n *compositeNode) End() *SourcePos { + return n.children[len(n.children)-1].End() +} + +func (n *compositeNode) LeadingComments() []Comment { + return n.children[0].LeadingComments() +} + +func (n *compositeNode) TrailingComments() []Comment { + return n.children[len(n.children)-1].TrailingComments() +} + +// RuneNode represents a single rune in protobuf source. Runes +// are typically collected into tokens, but some runes stand on +// their own, such as punctuation/symbols like commas, semicolons, +// equals signs, open and close symbols (braces, brackets, angles, +// and parentheses), and periods/dots. +type RuneNode struct { + terminalNode + Rune rune +} + +// NewRuneNode creates a new *RuneNode with the given properties. +func NewRuneNode(r rune, info TokenInfo) *RuneNode { + return &RuneNode{ + terminalNode: info.asTerminalNode(), + Rune: r, + } +} + +// EmptyDeclNode represents an empty declaration in protobuf source. 
+// These amount to extra semicolons, with no actual content preceding +// the semicolon. +type EmptyDeclNode struct { + compositeNode + Semicolon *RuneNode +} + +// NewEmptyDeclNode creates a new *EmptyDeclNode. The one argument must +// be non-nil. +func NewEmptyDeclNode(semicolon *RuneNode) *EmptyDeclNode { + if semicolon == nil { + panic("semicolon is nil") + } + return &EmptyDeclNode{ + compositeNode: compositeNode{ + children: []Node{semicolon}, + }, + Semicolon: semicolon, + } +} + +func (e *EmptyDeclNode) fileElement() {} +func (e *EmptyDeclNode) msgElement() {} +func (e *EmptyDeclNode) extendElement() {} +func (e *EmptyDeclNode) oneOfElement() {} +func (e *EmptyDeclNode) enumElement() {} +func (e *EmptyDeclNode) serviceElement() {} +func (e *EmptyDeclNode) methodElement() {} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/options.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/options.go new file mode 100644 index 00000000000..9f4a74e6972 --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/options.go @@ -0,0 +1,300 @@ +package ast + +import "fmt" + +// OptionDeclNode is a placeholder interface for AST nodes that represent +// options. This allows NoSourceNode to be used in place of *OptionNode +// for some usages. +type OptionDeclNode interface { + Node + GetName() Node + GetValue() ValueNode +} + +var _ OptionDeclNode = (*OptionNode)(nil) +var _ OptionDeclNode = NoSourceNode{} + +// OptionNode represents the declaration of a single option for an element. +// It is used both for normal option declarations (start with "option" keyword +// and end with semicolon) and for compact options found in fields, enum values, +// and extension ranges. Example: +// +// option (custom.option) = "foo"; +type OptionNode struct { + compositeNode + Keyword *KeywordNode // absent for compact options + Name *OptionNameNode + Equals *RuneNode + Val ValueNode + Semicolon *RuneNode // absent for compact options +} + +func (e *OptionNode) fileElement() {} +func (e *OptionNode) msgElement() {} +func (e *OptionNode) oneOfElement() {} +func (e *OptionNode) enumElement() {} +func (e *OptionNode) serviceElement() {} +func (e *OptionNode) methodElement() {} + +// NewOptionNode creates a new *OptionNode for a full option declaration (as +// used in files, messages, oneofs, enums, services, and methods). All arguments +// must be non-nil. (Also see NewCompactOptionNode.) +// - keyword: The token corresponding to the "option" keyword. +// - name: The token corresponding to the name of the option. +// - equals: The token corresponding to the "=" rune after the name. +// - val: The token corresponding to the option value. +// - semicolon: The token corresponding to the ";" rune that ends the declaration. +func NewOptionNode(keyword *KeywordNode, name *OptionNameNode, equals *RuneNode, val ValueNode, semicolon *RuneNode) *OptionNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if equals == nil { + panic("equals is nil") + } + if val == nil { + panic("val is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + children := []Node{keyword, name, equals, val, semicolon} + return &OptionNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Name: name, + Equals: equals, + Val: val, + Semicolon: semicolon, + } +} + +// NewCompactOptionNode creates a new *OptionNode for a full compact declaration +// (as used in fields, enum values, and extension ranges). 
All arguments must be +// non-nil. +// - name: The token corresponding to the name of the option. +// - equals: The token corresponding to the "=" rune after the name. +// - val: The token corresponding to the option value. +func NewCompactOptionNode(name *OptionNameNode, equals *RuneNode, val ValueNode) *OptionNode { + if name == nil { + panic("name is nil") + } + if equals == nil { + panic("equals is nil") + } + if val == nil { + panic("val is nil") + } + children := []Node{name, equals, val} + return &OptionNode{ + compositeNode: compositeNode{ + children: children, + }, + Name: name, + Equals: equals, + Val: val, + } +} + +func (n *OptionNode) GetName() Node { + return n.Name +} + +func (n *OptionNode) GetValue() ValueNode { + return n.Val +} + +// OptionNameNode represents an option name or even a traversal through message +// types to name a nested option field. Example: +// +// (foo.bar).baz.(bob) +type OptionNameNode struct { + compositeNode + Parts []*FieldReferenceNode + // Dots represent the separating '.' characters between name parts. The + // length of this slice must be exactly len(Parts)-1, each item in Parts + // having a corresponding item in this slice *except the last* (since a + // trailing dot is not allowed). + // + // These do *not* include dots that are inside of an extension name. For + // example: (foo.bar).baz.(bob) has three parts: + // 1. (foo.bar) - an extension name + // 2. baz - a regular field in foo.bar + // 3. (bob) - an extension field in baz + // Note that the dot in foo.bar will thus not be present in Dots but is + // instead in Parts[0]. + Dots []*RuneNode +} + +// NewOptionNameNode creates a new *OptionNameNode. The dots arg must have a +// length that is one less than the length of parts. The parts arg must not be +// empty. +func NewOptionNameNode(parts []*FieldReferenceNode, dots []*RuneNode) *OptionNameNode { + if len(parts) == 0 { + panic("must have at least one part") + } + if len(dots) != len(parts)-1 { + panic(fmt.Sprintf("%d parts requires %d dots, not %d", len(parts), len(parts)-1, len(dots))) + } + children := make([]Node, 0, len(parts)*2-1) + for i, part := range parts { + if part == nil { + panic(fmt.Sprintf("parts[%d] is nil", i)) + } + if i > 0 { + if dots[i-1] == nil { + panic(fmt.Sprintf("dots[%d] is nil", i-1)) + } + children = append(children, dots[i-1]) + } + children = append(children, part) + } + return &OptionNameNode{ + compositeNode: compositeNode{ + children: children, + }, + Parts: parts, + Dots: dots, + } +} + +// FieldReferenceNode is a reference to a field name. It can indicate a regular +// field (simple unqualified name) or an extension field (possibly-qualified +// name that is enclosed either in brackets or parentheses). +// +// This is used in options to indicate the names of custom options (which are +// actually extensions), in which case the name is enclosed in parentheses "(" +// and ")". It is also used in message literals to set extension fields, in +// which case the name is enclosed in square brackets "[" and "]". +// +// Example: +// (foo.bar) +type FieldReferenceNode struct { + compositeNode + Open *RuneNode // only present for extension names + Name IdentValueNode + Close *RuneNode // only present for extension names +} + +// NewFieldReferenceNode creates a new *FieldReferenceNode for a regular field. +// The name arg must not be nil. 
+func NewFieldReferenceNode(name *IdentNode) *FieldReferenceNode { + if name == nil { + panic("name is nil") + } + children := []Node{name} + return &FieldReferenceNode{ + compositeNode: compositeNode{ + children: children, + }, + Name: name, + } +} + +// NewExtensionFieldReferenceNode creates a new *FieldReferenceNode for an +// extension field. All args must be non-nil. The openSym and closeSym runes +// should be "(" and ")" or "[" and "]". +func NewExtensionFieldReferenceNode(openSym *RuneNode, name IdentValueNode, closeSym *RuneNode) *FieldReferenceNode { + if name == nil { + panic("name is nil") + } + if openSym == nil { + panic("openSym is nil") + } + if closeSym == nil { + panic("closeSym is nil") + } + children := []Node{openSym, name, closeSym} + return &FieldReferenceNode{ + compositeNode: compositeNode{ + children: children, + }, + Open: openSym, + Name: name, + Close: closeSym, + } +} + +// IsExtension reports if this is an extension name or not (e.g. enclosed in +// punctuation, such as parentheses or brackets). +func (a *FieldReferenceNode) IsExtension() bool { + return a.Open != nil +} + +func (a *FieldReferenceNode) Value() string { + if a.Open != nil { + return string(a.Open.Rune) + string(a.Name.AsIdentifier()) + string(a.Close.Rune) + } else { + return string(a.Name.AsIdentifier()) + } +} + +// CompactOptionsNode represents a compact options declaration, as used with +// fields, enum values, and extension ranges. Example: +// +// [deprecated = true, json_name = "foo_bar"] +type CompactOptionsNode struct { + compositeNode + OpenBracket *RuneNode + Options []*OptionNode + // Commas represent the separating ',' characters between options. The + // length of this slice must be exactly len(Options)-1, with each item + // in Options having a corresponding item in this slice *except the last* + // (since a trailing comma is not allowed). + Commas []*RuneNode + CloseBracket *RuneNode +} + +// NewCompactOptionsNode creates a *CompactOptionsNode. All args must be +// non-nil. The commas arg must have a length that is one less than the +// length of opts. The opts arg must not be empty. 
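A rough sketch (same hypothetical tok helper) of assembling the compact option (custom.option) = "foo" and wrapping it in a bracketed compact options list, combining the field-reference, option-name, and compact-option constructors described above:

package main

import (
    "fmt"

    "github.com/jhump/protoreflect/desc/protoparse/ast"
)

func tok(raw string) ast.TokenInfo { return ast.TokenInfo{RawText: raw} }

func main() {
    // The extension name custom.option, wrapped in parentheses to form the
    // field reference (custom.option).
    extName := ast.NewCompoundIdentNode(nil,
        []*ast.IdentNode{
            ast.NewIdentNode("custom", tok("custom")),
            ast.NewIdentNode("option", tok("option")),
        },
        []*ast.RuneNode{ast.NewRuneNode('.', tok("."))},
    )
    ref := ast.NewExtensionFieldReferenceNode(
        ast.NewRuneNode('(', tok("(")), extName, ast.NewRuneNode(')', tok(")")))

    // A one-part option name has no separating dots.
    name := ast.NewOptionNameNode([]*ast.FieldReferenceNode{ref}, nil)

    // Compact form: no "option" keyword and no trailing semicolon.
    opt := ast.NewCompactOptionNode(
        name,
        ast.NewRuneNode('=', tok("=")),
        ast.NewStringLiteralNode("foo", tok(`"foo"`)),
    )

    // [(custom.option) = "foo"] -- a bracketed list holding the single option.
    opts := ast.NewCompactOptionsNode(
        ast.NewRuneNode('[', tok("[")), []*ast.OptionNode{opt}, nil, ast.NewRuneNode(']', tok("]")))

    fmt.Println(ref.IsExtension(), len(opts.GetElements())) // true 1
}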
+func NewCompactOptionsNode(openBracket *RuneNode, opts []*OptionNode, commas []*RuneNode, closeBracket *RuneNode) *CompactOptionsNode { + if openBracket == nil { + panic("openBracket is nil") + } + if closeBracket == nil { + panic("closeBracket is nil") + } + if len(opts) == 0 { + panic("must have at least one part") + } + if len(commas) != len(opts)-1 { + panic(fmt.Sprintf("%d opts requires %d commas, not %d", len(opts), len(opts)-1, len(commas))) + } + children := make([]Node, 0, len(opts)*2+1) + children = append(children, openBracket) + for i, opt := range opts { + if i > 0 { + if commas[i-1] == nil { + panic(fmt.Sprintf("commas[%d] is nil", i-1)) + } + children = append(children, commas[i-1]) + } + if opt == nil { + panic(fmt.Sprintf("opts[%d] is nil", i)) + } + children = append(children, opt) + } + children = append(children, closeBracket) + + return &CompactOptionsNode{ + compositeNode: compositeNode{ + children: children, + }, + OpenBracket: openBracket, + Options: opts, + Commas: commas, + CloseBracket: closeBracket, + } +} + +func (e *CompactOptionsNode) GetElements() []*OptionNode { + if e == nil { + return nil + } + return e.Options +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/print.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/print.go new file mode 100644 index 00000000000..271200c7323 --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/print.go @@ -0,0 +1,86 @@ +package ast + +import "io" + +// Print prints the given AST node to the given output. This operation +// basically walks the AST and, for each TerminalNode, prints the node's +// leading comments, leading whitespace, the node's raw text, and then +// any trailing comments. If the given node is a *FileNode, it will then +// also print the file's FinalComments and FinalWhitespace. 
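A short sketch of the Print behavior described above on a hand-built node: with fabricated TokenInfo, the output is simply each token's leading whitespace followed by its raw text (the tok helper and its whitespace argument are illustrative assumptions, not part of this package):

package main

import (
    "bytes"
    "fmt"
    "log"

    "github.com/jhump/protoreflect/desc/protoparse/ast"
)

// tok fabricates TokenInfo with explicit leading whitespace so the printed
// output stays readable.
func tok(ws, raw string) ast.TokenInfo {
    return ast.TokenInfo{LeadingWhitespace: ws, RawText: raw}
}

func main() {
    pkg := ast.NewPackageNode(
        ast.NewKeywordNode("package", tok("", "package")),
        ast.NewIdentNode("foobar", tok(" ", "foobar")),
        ast.NewRuneNode(';', tok("", ";")),
    )
    var buf bytes.Buffer
    if err := ast.Print(&buf, pkg); err != nil {
        log.Fatal(err)
    }
    fmt.Printf("%q\n", buf.String()) // "package foobar;"
}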
+func Print(w io.Writer, node Node) error { + sw, ok := w.(stringWriter) + if !ok { + sw = &strWriter{w} + } + var err error + Walk(node, func(n Node) (bool, VisitFunc) { + if err != nil { + return false, nil + } + token, ok := n.(TerminalNode) + if !ok { + return true, nil + } + + err = printComments(sw, token.LeadingComments()) + if err != nil { + return false, nil + } + + _, err = sw.WriteString(token.LeadingWhitespace()) + if err != nil { + return false, nil + } + + _, err = sw.WriteString(token.RawText()) + if err != nil { + return false, nil + } + + err = printComments(sw, token.TrailingComments()) + return false, nil + }) + if err != nil { + return err + } + + if file, ok := node.(*FileNode); ok { + err = printComments(sw, file.FinalComments) + if err != nil { + return err + } + _, err = sw.WriteString(file.FinalWhitespace) + return err + } + + return nil +} + +func printComments(sw stringWriter, comments []Comment) error { + for _, comment := range comments { + if _, err := sw.WriteString(comment.LeadingWhitespace); err != nil { + return err + } + if _, err := sw.WriteString(comment.Text); err != nil { + return err + } + } + return nil +} + +// many io.Writer impls also provide a string-based method +type stringWriter interface { + WriteString(s string) (n int, err error) +} + +// adapter, in case the given writer does NOT provide a string-based method +type strWriter struct { + io.Writer +} + +func (s *strWriter) WriteString(str string) (int, error) { + if str == "" { + return 0, nil + } + return s.Write([]byte(str)) +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/ranges.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/ranges.go new file mode 100644 index 00000000000..341676f3330 --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/ranges.go @@ -0,0 +1,305 @@ +package ast + +import "fmt" + +// ExtensionRangeNode represents an extension range declaration in an extendable +// message. Example: +// +// extensions 100 to max; +type ExtensionRangeNode struct { + compositeNode + Keyword *KeywordNode + Ranges []*RangeNode + // Commas represent the separating ',' characters between ranges. The + // length of this slice must be exactly len(Ranges)-1, each item in Ranges + // having a corresponding item in this slice *except the last* (since a + // trailing comma is not allowed). + Commas []*RuneNode + Options *CompactOptionsNode + Semicolon *RuneNode +} + +func (e *ExtensionRangeNode) msgElement() {} + +// NewExtensionRangeNode creates a new *ExtensionRangeNode. All args must be +// non-nil except opts, which may be nil. +// - keyword: The token corresponding to the "extends" keyword. +// - ranges: One or more range expressions. +// - commas: Tokens that represent the "," runes that delimit the range expressions. +// The length of commas must be one less than the length of ranges. +// - opts: The node corresponding to options that apply to each of the ranges. +// - semicolon The token corresponding to the ";" rune that ends the declaration. 
+func NewExtensionRangeNode(keyword *KeywordNode, ranges []*RangeNode, commas []*RuneNode, opts *CompactOptionsNode, semicolon *RuneNode) *ExtensionRangeNode { + if keyword == nil { + panic("keyword is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + if len(ranges) == 0 { + panic("must have at least one range") + } + if len(commas) != len(ranges)-1 { + panic(fmt.Sprintf("%d ranges requires %d commas, not %d", len(ranges), len(ranges)-1, len(commas))) + } + numChildren := len(ranges)*2 + 1 + if opts != nil { + numChildren++ + } + children := make([]Node, 0, numChildren) + children = append(children, keyword) + for i, rng := range ranges { + if i > 0 { + if commas[i-1] == nil { + panic(fmt.Sprintf("commas[%d] is nil", i-1)) + } + children = append(children, commas[i-1]) + } + if rng == nil { + panic(fmt.Sprintf("ranges[%d] is nil", i)) + } + children = append(children, rng) + } + if opts != nil { + children = append(children, opts) + } + children = append(children, semicolon) + return &ExtensionRangeNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Ranges: ranges, + Commas: commas, + Options: opts, + Semicolon: semicolon, + } +} + +// RangeDeclNode is a placeholder interface for AST nodes that represent +// numeric values. This allows NoSourceNode to be used in place of *RangeNode +// for some usages. +type RangeDeclNode interface { + Node + RangeStart() Node + RangeEnd() Node +} + +var _ RangeDeclNode = (*RangeNode)(nil) +var _ RangeDeclNode = NoSourceNode{} + +// RangeNode represents a range expression, used in both extension ranges and +// reserved ranges. Example: +// +// 1000 to max +type RangeNode struct { + compositeNode + StartVal IntValueNode + // if To is non-nil, then exactly one of EndVal or Max must also be non-nil + To *KeywordNode + // EndVal and Max are mutually exclusive + EndVal IntValueNode + Max *KeywordNode +} + +// NewRangeNode creates a new *RangeNode. The start argument must be non-nil. +// The to argument represents the "to" keyword. If present (i.e. if it is non-nil), +// then so must be exactly one of end or max. If max is non-nil, it indicates a +// "100 to max" style range. But if end is non-nil, the end of the range is a +// literal, such as "100 to 200". 
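A rough sketch of the "100 to max" style range just described (hypothetical tok helper again): the start is range-checked through AsInt32, while "max" resolves to whatever maximum the caller passes to EndValueAsInt32:

package main

import (
    "fmt"

    "github.com/jhump/protoreflect/desc/protoparse/ast"
)

func tok(raw string) ast.TokenInfo { return ast.TokenInfo{RawText: raw} }

func main() {
    // Equivalent of the source text "100 to max".
    rng := ast.NewRangeNode(
        ast.NewUintLiteralNode(100, tok("100")),
        ast.NewKeywordNode("to", tok("to")),
        nil, // no literal end value...
        ast.NewKeywordNode("max", tok("max")), // ...because "max" is used instead
    )

    start, ok := rng.StartValueAsInt32(1, 1000)
    fmt.Println(start, ok) // 100 true

    end, ok := rng.EndValueAsInt32(1, 1000)
    fmt.Println(end, ok) // 1000 true: "max" resolves to the caller-supplied maximum
}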
+func NewRangeNode(start IntValueNode, to *KeywordNode, end IntValueNode, max *KeywordNode) *RangeNode { + if start == nil { + panic("start is nil") + } + numChildren := 1 + if to != nil { + if end == nil && max == nil { + panic("to is not nil, but end and max both are") + } + if end != nil && max != nil { + panic("end and max cannot be both non-nil") + } + numChildren = 3 + } else { + if end != nil { + panic("to is nil, but end is not") + } + if max != nil { + panic("to is nil, but max is not") + } + } + children := make([]Node, 0, numChildren) + children = append(children, start) + if to != nil { + children = append(children, to) + if end != nil { + children = append(children, end) + } else { + children = append(children, max) + } + } + return &RangeNode{ + compositeNode: compositeNode{ + children: children, + }, + StartVal: start, + To: to, + EndVal: end, + Max: max, + } +} + +func (n *RangeNode) RangeStart() Node { + return n.StartVal +} + +func (n *RangeNode) RangeEnd() Node { + if n.Max != nil { + return n.Max + } + if n.EndVal != nil { + return n.EndVal + } + return n.StartVal +} + +func (n *RangeNode) StartValue() interface{} { + return n.StartVal.Value() +} + +func (n *RangeNode) StartValueAsInt32(min, max int32) (int32, bool) { + return AsInt32(n.StartVal, min, max) +} + +func (n *RangeNode) EndValue() interface{} { + if n.EndVal == nil { + return nil + } + return n.EndVal.Value() +} + +func (n *RangeNode) EndValueAsInt32(min, max int32) (int32, bool) { + if n.Max != nil { + return max, true + } + if n.EndVal == nil { + return n.StartValueAsInt32(min, max) + } + return AsInt32(n.EndVal, min, max) +} + +// ReservedNode represents reserved declaration, whic can be used to reserve +// either names or numbers. Examples: +// +// reserved 1, 10-12, 15; +// reserved "foo", "bar", "baz"; +type ReservedNode struct { + compositeNode + Keyword *KeywordNode + // If non-empty, this node represents reserved ranges and Names will be empty. + Ranges []*RangeNode + // If non-empty, this node represents reserved names and Ranges will be empty. + Names []StringValueNode + // Commas represent the separating ',' characters between options. The + // length of this slice must be exactly len(Ranges)-1 or len(Names)-1, depending + // on whether this node represents reserved ranges or reserved names. Each item + // in Ranges or Names has a corresponding item in this slice *except the last* + // (since a trailing comma is not allowed). + Commas []*RuneNode + Semicolon *RuneNode +} + +func (*ReservedNode) msgElement() {} +func (*ReservedNode) enumElement() {} + +// NewReservedRangesNode creates a new *ReservedNode that represents reserved +// numeric ranges. All args must be non-nil. +// - keyword: The token corresponding to the "reserved" keyword. +// - ranges: One or more range expressions. +// - commas: Tokens that represent the "," runes that delimit the range expressions. +// The length of commas must be one less than the length of ranges. +// - semicolon The token corresponding to the ";" rune that ends the declaration. 
+func NewReservedRangesNode(keyword *KeywordNode, ranges []*RangeNode, commas []*RuneNode, semicolon *RuneNode) *ReservedNode { + if keyword == nil { + panic("keyword is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + if len(ranges) == 0 { + panic("must have at least one range") + } + if len(commas) != len(ranges)-1 { + panic(fmt.Sprintf("%d ranges requires %d commas, not %d", len(ranges), len(ranges)-1, len(commas))) + } + children := make([]Node, 0, len(ranges)*2+1) + children = append(children, keyword) + for i, rng := range ranges { + if i > 0 { + if commas[i-1] == nil { + panic(fmt.Sprintf("commas[%d] is nil", i-1)) + } + children = append(children, commas[i-1]) + } + if rng == nil { + panic(fmt.Sprintf("ranges[%d] is nil", i)) + } + children = append(children, rng) + } + children = append(children, semicolon) + return &ReservedNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Ranges: ranges, + Commas: commas, + Semicolon: semicolon, + } +} + +// NewReservedNamesNode creates a new *ReservedNode that represents reserved +// names. All args must be non-nil. +// - keyword: The token corresponding to the "reserved" keyword. +// - names: One or more names. +// - commas: Tokens that represent the "," runes that delimit the names. +// The length of commas must be one less than the length of names. +// - semicolon The token corresponding to the ";" rune that ends the declaration. +func NewReservedNamesNode(keyword *KeywordNode, names []StringValueNode, commas []*RuneNode, semicolon *RuneNode) *ReservedNode { + if keyword == nil { + panic("keyword is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + if len(names) == 0 { + panic("must have at least one name") + } + if len(commas) != len(names)-1 { + panic(fmt.Sprintf("%d names requires %d commas, not %d", len(names), len(names)-1, len(commas))) + } + children := make([]Node, 0, len(names)*2+1) + children = append(children, keyword) + for i, name := range names { + if i > 0 { + if commas[i-1] == nil { + panic(fmt.Sprintf("commas[%d] is nil", i-1)) + } + children = append(children, commas[i-1]) + } + if name == nil { + panic(fmt.Sprintf("names[%d] is nil", i)) + } + children = append(children, name) + } + children = append(children, semicolon) + return &ReservedNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Names: names, + Commas: commas, + Semicolon: semicolon, + } +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/service.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/service.go new file mode 100644 index 00000000000..d8cfe8b5a11 --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/service.go @@ -0,0 +1,273 @@ +package ast + +import "fmt" + +// ServiceNode represents a service declaration. Example: +// +// service Foo { +// rpc Bar (Baz) returns (Bob); +// rpc Frobnitz (stream Parts) returns (Gyzmeaux); +// } +type ServiceNode struct { + compositeNode + Keyword *KeywordNode + Name *IdentNode + OpenBrace *RuneNode + Decls []ServiceElement + CloseBrace *RuneNode +} + +func (*ServiceNode) fileElement() {} + +// NewServiceNode creates a new *ServiceNode. All arguments must be non-nil. +// - keyword: The token corresponding to the "service" keyword. +// - name: The token corresponding to the service's name. +// - openBrace: The token corresponding to the "{" rune that starts the body. +// - decls: All declarations inside the service body. 
+// - closeBrace: The token corresponding to the "}" rune that ends the body. +func NewServiceNode(keyword *KeywordNode, name *IdentNode, openBrace *RuneNode, decls []ServiceElement, closeBrace *RuneNode) *ServiceNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if openBrace == nil { + panic("openBrace is nil") + } + if closeBrace == nil { + panic("closeBrace is nil") + } + children := make([]Node, 0, 4+len(decls)) + children = append(children, keyword, name, openBrace) + for _, decl := range decls { + children = append(children, decl) + } + children = append(children, closeBrace) + + for _, decl := range decls { + switch decl := decl.(type) { + case *OptionNode, *RPCNode, *EmptyDeclNode: + default: + panic(fmt.Sprintf("invalid ServiceElement type: %T", decl)) + } + } + + return &ServiceNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Name: name, + OpenBrace: openBrace, + Decls: decls, + CloseBrace: closeBrace, + } +} + +// ServiceElement is an interface implemented by all AST nodes that can +// appear in the body of a service declaration. +type ServiceElement interface { + Node + serviceElement() +} + +var _ ServiceElement = (*OptionNode)(nil) +var _ ServiceElement = (*RPCNode)(nil) +var _ ServiceElement = (*EmptyDeclNode)(nil) + +// RPCDeclNode is a placeholder interface for AST nodes that represent RPC +// declarations. This allows NoSourceNode to be used in place of *RPCNode +// for some usages. +type RPCDeclNode interface { + Node + GetInputType() Node + GetOutputType() Node +} + +var _ RPCDeclNode = (*RPCNode)(nil) +var _ RPCDeclNode = NoSourceNode{} + +// RPCNode represents an RPC declaration. Example: +// +// rpc Foo (Bar) returns (Baz); +type RPCNode struct { + compositeNode + Keyword *KeywordNode + Name *IdentNode + Input *RPCTypeNode + Returns *KeywordNode + Output *RPCTypeNode + Semicolon *RuneNode + OpenBrace *RuneNode + Decls []RPCElement + CloseBrace *RuneNode +} + +func (n *RPCNode) serviceElement() {} + +// NewRPCNode creates a new *RPCNode with no body. All arguments must be non-nil. +// - keyword: The token corresponding to the "rpc" keyword. +// - name: The token corresponding to the RPC's name. +// - input: The token corresponding to the RPC input message type. +// - returns: The token corresponding to the "returns" keyword that precedes the output type. +// - output: The token corresponding to the RPC output message type. +// - semicolon: The token corresponding to the ";" rune that ends the declaration. +func NewRPCNode(keyword *KeywordNode, name *IdentNode, input *RPCTypeNode, returns *KeywordNode, output *RPCTypeNode, semicolon *RuneNode) *RPCNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if input == nil { + panic("input is nil") + } + if returns == nil { + panic("returns is nil") + } + if output == nil { + panic("output is nil") + } + if semicolon == nil { + panic("semicolon is nil") + } + children := []Node{keyword, name, input, returns, output, semicolon} + return &RPCNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Name: name, + Input: input, + Returns: returns, + Output: output, + Semicolon: semicolon, + } +} + +// NewRPCNodeWithBody creates a new *RPCNode that includes a body (and possibly +// options). All arguments must be non-nil. +// - keyword: The token corresponding to the "rpc" keyword. +// - name: The token corresponding to the RPC's name. 
+// - input: The token corresponding to the RPC input message type. +// - returns: The token corresponding to the "returns" keyword that precedes the output type. +// - output: The token corresponding to the RPC output message type. +// - openBrace: The token corresponding to the "{" rune that starts the body. +// - decls: All declarations inside the RPC body. +// - closeBrace: The token corresponding to the "}" rune that ends the body. +func NewRPCNodeWithBody(keyword *KeywordNode, name *IdentNode, input *RPCTypeNode, returns *KeywordNode, output *RPCTypeNode, openBrace *RuneNode, decls []RPCElement, closeBrace *RuneNode) *RPCNode { + if keyword == nil { + panic("keyword is nil") + } + if name == nil { + panic("name is nil") + } + if input == nil { + panic("input is nil") + } + if returns == nil { + panic("returns is nil") + } + if output == nil { + panic("output is nil") + } + if openBrace == nil { + panic("openBrace is nil") + } + if closeBrace == nil { + panic("closeBrace is nil") + } + children := make([]Node, 0, 7+len(decls)) + children = append(children, keyword, name, input, returns, output, openBrace) + for _, decl := range decls { + children = append(children, decl) + } + children = append(children, closeBrace) + + for _, decl := range decls { + switch decl := decl.(type) { + case *OptionNode, *EmptyDeclNode: + default: + panic(fmt.Sprintf("invalid RPCElement type: %T", decl)) + } + } + + return &RPCNode{ + compositeNode: compositeNode{ + children: children, + }, + Keyword: keyword, + Name: name, + Input: input, + Returns: returns, + Output: output, + OpenBrace: openBrace, + Decls: decls, + CloseBrace: closeBrace, + } +} + +func (n *RPCNode) GetInputType() Node { + return n.Input.MessageType +} + +func (n *RPCNode) GetOutputType() Node { + return n.Output.MessageType +} + +// RPCElement is an interface implemented by all AST nodes that can +// appear in the body of an rpc declaration (aka method). +type RPCElement interface { + Node + methodElement() +} + +var _ RPCElement = (*OptionNode)(nil) +var _ RPCElement = (*EmptyDeclNode)(nil) + +// RPCTypeNode represents the declaration of a request or response type for an +// RPC. Example: +// +// (stream foo.Bar) +type RPCTypeNode struct { + compositeNode + OpenParen *RuneNode + Stream *KeywordNode + MessageType IdentValueNode + CloseParen *RuneNode +} + +// NewRPCTypeNode creates a new *RPCTypeNode. All arguments must be non-nil +// except stream, which may be nil. +// - openParen: The token corresponding to the "(" rune that starts the declaration. +// - stream: The token corresponding to the "stream" keyword or nil if not present. +// - msgType: The token corresponding to the message type's name. +// - closeParen: The token corresponding to the ")" rune that ends the declaration. 
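A compact sketch (with hypothetical tok, ident, kw, and r helpers) of assembling rpc Bar (Baz) returns (Bob); and wrapping it in a service, mirroring the example given in the ServiceNode comment:

package main

import (
    "fmt"

    "github.com/jhump/protoreflect/desc/protoparse/ast"
)

func tok(raw string) ast.TokenInfo  { return ast.TokenInfo{RawText: raw} }
func ident(s string) *ast.IdentNode { return ast.NewIdentNode(s, tok(s)) }
func kw(s string) *ast.KeywordNode  { return ast.NewKeywordNode(s, tok(s)) }
func r(c rune) *ast.RuneNode        { return ast.NewRuneNode(c, tok(string(c))) }

func main() {
    // (Baz) and (Bob); there is no "stream" keyword, so that argument is nil.
    in := ast.NewRPCTypeNode(r('('), nil, ident("Baz"), r(')'))
    out := ast.NewRPCTypeNode(r('('), nil, ident("Bob"), r(')'))

    // rpc Bar (Baz) returns (Bob);
    rpc := ast.NewRPCNode(kw("rpc"), ident("Bar"), in, kw("returns"), out, r(';'))

    // service Foo { rpc Bar (Baz) returns (Bob); }
    svc := ast.NewServiceNode(kw("service"), ident("Foo"), r('{'),
        []ast.ServiceElement{rpc}, r('}'))

    fmt.Println(svc.Name.Val, len(svc.Decls)) // Foo 1
}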
+func NewRPCTypeNode(openParen *RuneNode, stream *KeywordNode, msgType IdentValueNode, closeParen *RuneNode) *RPCTypeNode { + if openParen == nil { + panic("openParen is nil") + } + if msgType == nil { + panic("msgType is nil") + } + if closeParen == nil { + panic("closeParen is nil") + } + var children []Node + if stream != nil { + children = []Node{openParen, stream, msgType, closeParen} + } else { + children = []Node{openParen, msgType, closeParen} + } + + return &RPCTypeNode{ + compositeNode: compositeNode{ + children: children, + }, + OpenParen: openParen, + Stream: stream, + MessageType: msgType, + CloseParen: closeParen, + } +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/source_pos.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/source_pos.go new file mode 100644 index 00000000000..7346a84c513 --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/source_pos.go @@ -0,0 +1,38 @@ +package ast + +import "fmt" + +// SourcePos identifies a location in a proto source file. +type SourcePos struct { + Filename string + Line, Col int + Offset int +} + +func (pos SourcePos) String() string { + if pos.Line <= 0 || pos.Col <= 0 { + return pos.Filename + } + return fmt.Sprintf("%s:%d:%d", pos.Filename, pos.Line, pos.Col) +} + +// PosRange is a range of positions in a source file that indicates +// the span of some region of source, such as a single token or +// a sub-tree of the AST. +type PosRange struct { + Start, End SourcePos +} + +// Comment represents a single comment in a source file. It indicates +// the position of the comment and its contents. +type Comment struct { + // The location of the comment in the source file. + PosRange + // Any whitespace between the prior lexical element (either a token + // or other comment) and this comment. + LeadingWhitespace string + // The text of the comment, including any "//" or "/*" and "*/" + // symbols at the start and end. Single-line comments will include + // the trailing newline rune in Text. + Text string +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/values.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/values.go new file mode 100644 index 00000000000..b19ab47bc87 --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/values.go @@ -0,0 +1,563 @@ +package ast + +import ( + "fmt" + "math" + "strings" +) + +// ValueNode is an AST node that represents a literal value. +// +// It also includes references (e.g. IdentifierValueNode), which can be +// used as values in some contexts, such as describing the default value +// for a field, which can refer to an enum value. +// +// This also allows NoSourceNode to be used in place of a real value node +// for some usages. +type ValueNode interface { + Node + // Value returns a Go representation of the value. For scalars, this + // will be a string, int64, uint64, float64, or bool. This could also + // be an Identifier (e.g. IdentValueNodes). 
It can also be a composite + // literal: + // * For array literals, the type returned will be []ValueNode + // * For message literals, the type returned will be []*MessageFieldNode + Value() interface{} +} + +var _ ValueNode = (*IdentNode)(nil) +var _ ValueNode = (*CompoundIdentNode)(nil) +var _ ValueNode = (*StringLiteralNode)(nil) +var _ ValueNode = (*CompoundStringLiteralNode)(nil) +var _ ValueNode = (*UintLiteralNode)(nil) +var _ ValueNode = (*PositiveUintLiteralNode)(nil) +var _ ValueNode = (*NegativeIntLiteralNode)(nil) +var _ ValueNode = (*FloatLiteralNode)(nil) +var _ ValueNode = (*SpecialFloatLiteralNode)(nil) +var _ ValueNode = (*SignedFloatLiteralNode)(nil) +var _ ValueNode = (*BoolLiteralNode)(nil) +var _ ValueNode = (*ArrayLiteralNode)(nil) +var _ ValueNode = (*MessageLiteralNode)(nil) +var _ ValueNode = NoSourceNode{} + +// StringValueNode is an AST node that represents a string literal. +// Such a node can be a single literal (*StringLiteralNode) or a +// concatenation of multiple literals (*CompoundStringLiteralNode). +type StringValueNode interface { + ValueNode + AsString() string +} + +var _ StringValueNode = (*StringLiteralNode)(nil) +var _ StringValueNode = (*CompoundStringLiteralNode)(nil) + +// StringLiteralNode represents a simple string literal. Example: +// +// "proto2" +type StringLiteralNode struct { + terminalNode + // Val is the actual string value that the literal indicates. + Val string +} + +// NewStringLiteralNode creates a new *StringLiteralNode with the given val. +func NewStringLiteralNode(val string, info TokenInfo) *StringLiteralNode { + return &StringLiteralNode{ + terminalNode: info.asTerminalNode(), + Val: val, + } +} + +func (n *StringLiteralNode) Value() interface{} { + return n.AsString() +} + +func (n *StringLiteralNode) AsString() string { + return n.Val +} + +// CompoundStringLiteralNode represents a compound string literal, which is +// the concatenaton of adjacent string literals. Example: +// +// "this " "is" " all one " "string" +type CompoundStringLiteralNode struct { + compositeNode + Val string +} + +// NewCompoundLiteralStringNode creates a new *CompoundStringLiteralNode that +// consists of the given string components. The components argument may not be +// empty. +func NewCompoundLiteralStringNode(components ...*StringLiteralNode) *CompoundStringLiteralNode { + if len(components) == 0 { + panic("must have at least one component") + } + children := make([]Node, len(components)) + var b strings.Builder + for i, comp := range components { + children[i] = comp + b.WriteString(comp.Val) + } + return &CompoundStringLiteralNode{ + compositeNode: compositeNode{ + children: children, + }, + Val: b.String(), + } +} + +func (n *CompoundStringLiteralNode) Value() interface{} { + return n.AsString() +} + +func (n *CompoundStringLiteralNode) AsString() string { + return n.Val +} + +// IntValueNode is an AST node that represents an integer literal. If +// an integer literal is too large for an int64 (or uint64 for +// positive literals), it is represented instead by a FloatValueNode. +type IntValueNode interface { + ValueNode + AsInt64() (int64, bool) + AsUint64() (uint64, bool) +} + +// AsInt32 range checks the given int value and returns its value is +// in the range or 0, false if it is outside the range. 
+func AsInt32(n IntValueNode, min, max int32) (int32, bool) { + i, ok := n.AsInt64() + if !ok { + return 0, false + } + if i < int64(min) || i > int64(max) { + return 0, false + } + return int32(i), true +} + +var _ IntValueNode = (*UintLiteralNode)(nil) +var _ IntValueNode = (*PositiveUintLiteralNode)(nil) +var _ IntValueNode = (*NegativeIntLiteralNode)(nil) + +// UintLiteralNode represents a simple integer literal with no sign character. +type UintLiteralNode struct { + terminalNode + // Val is the numeric value indicated by the literal + Val uint64 +} + +// NewUintLiteralNode creates a new *UintLiteralNode with the given val. +func NewUintLiteralNode(val uint64, info TokenInfo) *UintLiteralNode { + return &UintLiteralNode{ + terminalNode: info.asTerminalNode(), + Val: val, + } +} + +func (n *UintLiteralNode) Value() interface{} { + return n.Val +} + +func (n *UintLiteralNode) AsInt64() (int64, bool) { + if n.Val > math.MaxInt64 { + return 0, false + } + return int64(n.Val), true +} + +func (n *UintLiteralNode) AsUint64() (uint64, bool) { + return n.Val, true +} + +func (n *UintLiteralNode) AsFloat() float64 { + return float64(n.Val) +} + +// PositiveUintLiteralNode represents an integer literal with a positive (+) sign. +type PositiveUintLiteralNode struct { + compositeNode + Plus *RuneNode + Uint *UintLiteralNode + Val uint64 +} + +// NewPositiveUintLiteralNode creates a new *PositiveUintLiteralNode. Both +// arguments must be non-nil. +func NewPositiveUintLiteralNode(sign *RuneNode, i *UintLiteralNode) *PositiveUintLiteralNode { + if sign == nil { + panic("sign is nil") + } + if i == nil { + panic("i is nil") + } + children := []Node{sign, i} + return &PositiveUintLiteralNode{ + compositeNode: compositeNode{ + children: children, + }, + Plus: sign, + Uint: i, + Val: i.Val, + } +} + +func (n *PositiveUintLiteralNode) Value() interface{} { + return n.Val +} + +func (n *PositiveUintLiteralNode) AsInt64() (int64, bool) { + if n.Val > math.MaxInt64 { + return 0, false + } + return int64(n.Val), true +} + +func (n *PositiveUintLiteralNode) AsUint64() (uint64, bool) { + return n.Val, true +} + +// NegativeIntLiteralNode represents an integer literal with a negative (-) sign. +type NegativeIntLiteralNode struct { + compositeNode + Minus *RuneNode + Uint *UintLiteralNode + Val int64 +} + +// NewNegativeIntLiteralNode creates a new *NegativeIntLiteralNode. Both +// arguments must be non-nil. +func NewNegativeIntLiteralNode(sign *RuneNode, i *UintLiteralNode) *NegativeIntLiteralNode { + if sign == nil { + panic("sign is nil") + } + if i == nil { + panic("i is nil") + } + children := []Node{sign, i} + return &NegativeIntLiteralNode{ + compositeNode: compositeNode{ + children: children, + }, + Minus: sign, + Uint: i, + Val: -int64(i.Val), + } +} + +func (n *NegativeIntLiteralNode) Value() interface{} { + return n.Val +} + +func (n *NegativeIntLiteralNode) AsInt64() (int64, bool) { + return n.Val, true +} + +func (n *NegativeIntLiteralNode) AsUint64() (uint64, bool) { + if n.Val < 0 { + return 0, false + } + return uint64(n.Val), true +} + +// FloatValueNode is an AST node that represents a numeric literal with +// a floating point, in scientific notation, or too large to fit in an +// int64 or uint64. +type FloatValueNode interface { + ValueNode + AsFloat() float64 +} + +var _ FloatValueNode = (*FloatLiteralNode)(nil) +var _ FloatValueNode = (*SpecialFloatLiteralNode)(nil) +var _ FloatValueNode = (*UintLiteralNode)(nil) + +// FloatLiteralNode represents a floating point numeric literal. 
+type FloatLiteralNode struct { + terminalNode + // Val is the numeric value indicated by the literal + Val float64 +} + +// NewFloatLiteralNode creates a new *FloatLiteralNode with the given val. +func NewFloatLiteralNode(val float64, info TokenInfo) *FloatLiteralNode { + return &FloatLiteralNode{ + terminalNode: info.asTerminalNode(), + Val: val, + } +} + +func (n *FloatLiteralNode) Value() interface{} { + return n.AsFloat() +} + +func (n *FloatLiteralNode) AsFloat() float64 { + return n.Val +} + +// SpecialFloatLiteralNode represents a special floating point numeric literal +// for "inf" and "nan" values. +type SpecialFloatLiteralNode struct { + *KeywordNode + Val float64 +} + +// NewSpecialFloatLiteralNode returns a new *SpecialFloatLiteralNode for the +// given keyword, which must be "inf" or "nan". +func NewSpecialFloatLiteralNode(name *KeywordNode) *SpecialFloatLiteralNode { + var f float64 + if name.Val == "inf" { + f = math.Inf(1) + } else { + f = math.NaN() + } + return &SpecialFloatLiteralNode{ + KeywordNode: name, + Val: f, + } +} + +func (n *SpecialFloatLiteralNode) Value() interface{} { + return n.AsFloat() +} + +func (n *SpecialFloatLiteralNode) AsFloat() float64 { + return n.Val +} + +// SignedFloatLiteralNode represents a signed floating point number. +type SignedFloatLiteralNode struct { + compositeNode + Sign *RuneNode + Float FloatValueNode + Val float64 +} + +// NewSignedFloatLiteralNode creates a new *SignedFloatLiteralNode. Both +// arguments must be non-nil. +func NewSignedFloatLiteralNode(sign *RuneNode, f FloatValueNode) *SignedFloatLiteralNode { + if sign == nil { + panic("sign is nil") + } + if f == nil { + panic("f is nil") + } + children := []Node{sign, f} + val := f.AsFloat() + if sign.Rune == '-' { + val = -val + } + return &SignedFloatLiteralNode{ + compositeNode: compositeNode{ + children: children, + }, + Sign: sign, + Float: f, + Val: val, + } +} + +func (n *SignedFloatLiteralNode) Value() interface{} { + return n.Val +} + +func (n *SignedFloatLiteralNode) AsFloat() float64 { + return n.Val +} + +// BoolLiteralNode represents a boolean literal. +type BoolLiteralNode struct { + *KeywordNode + Val bool +} + +// NewBoolLiteralNode returns a new *BoolLiteralNode for the given keyword, +// which must be "true" or "false". +func NewBoolLiteralNode(name *KeywordNode) *BoolLiteralNode { + return &BoolLiteralNode{ + KeywordNode: name, + Val: name.Val == "true", + } +} + +func (n *BoolLiteralNode) Value() interface{} { + return n.Val +} + +// ArrayLiteralNode represents an array literal, which is only allowed inside of +// a MessageLiteralNode, to indicate values for a repeated field. Example: +// +// ["foo", "bar", "baz"] +type ArrayLiteralNode struct { + compositeNode + OpenBracket *RuneNode + Elements []ValueNode + // Commas represent the separating ',' characters between elements. The + // length of this slice must be exactly len(Elements)-1, with each item + // in Elements having a corresponding item in this slice *except the last* + // (since a trailing comma is not allowed). + Commas []*RuneNode + CloseBracket *RuneNode +} + +// NewArrayLiteralNode creates a new *ArrayLiteralNode. The openBracket and +// closeBracket args must be non-nil and represent the "[" and "]" runes that +// surround the array values. The given commas arg must have a length that is +// one less than the length of the vals arg. However, vals may be empty, in +// which case commas must also be empty. 
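A brief sketch of the commas invariant for array literals stated above (hypothetical tok helper): two elements take exactly one comma, and an empty array takes none:

package main

import (
    "fmt"

    "github.com/jhump/protoreflect/desc/protoparse/ast"
)

func tok(raw string) ast.TokenInfo { return ast.TokenInfo{RawText: raw} }

func main() {
    // ["foo", "bar"]: two elements, one separating comma.
    vals := []ast.ValueNode{
        ast.NewStringLiteralNode("foo", tok(`"foo"`)),
        ast.NewStringLiteralNode("bar", tok(`"bar"`)),
    }
    commas := []*ast.RuneNode{ast.NewRuneNode(',', tok(","))} // exactly len(vals)-1

    arr := ast.NewArrayLiteralNode(
        ast.NewRuneNode('[', tok("[")), vals, commas, ast.NewRuneNode(']', tok("]")))
    fmt.Println(len(arr.Elements)) // 2

    // []: an empty array literal takes no values and no commas.
    empty := ast.NewArrayLiteralNode(
        ast.NewRuneNode('[', tok("[")), nil, nil, ast.NewRuneNode(']', tok("]")))
    fmt.Println(len(empty.Elements)) // 0
}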
+func NewArrayLiteralNode(openBracket *RuneNode, vals []ValueNode, commas []*RuneNode, closeBracket *RuneNode) *ArrayLiteralNode { + if openBracket == nil { + panic("openBracket is nil") + } + if closeBracket == nil { + panic("closeBracket is nil") + } + if len(vals) == 0 && len(commas) != 0 { + panic("vals is empty but commas is not") + } + if len(vals) > 0 && len(commas) != len(vals)-1 { + panic(fmt.Sprintf("%d vals requires %d commas, not %d", len(vals), len(vals)-1, len(commas))) + } + children := make([]Node, 0, len(vals)*2+1) + children = append(children, openBracket) + for i, val := range vals { + if i > 0 { + if commas[i-1] == nil { + panic(fmt.Sprintf("commas[%d] is nil", i-1)) + } + children = append(children, commas[i-1]) + } + if val == nil { + panic(fmt.Sprintf("vals[%d] is nil", i)) + } + children = append(children, val) + } + children = append(children, closeBracket) + + return &ArrayLiteralNode{ + compositeNode: compositeNode{ + children: children, + }, + OpenBracket: openBracket, + Elements: vals, + Commas: commas, + CloseBracket: closeBracket, + } +} + +func (n *ArrayLiteralNode) Value() interface{} { + return n.Elements +} + +// MessageLiteralNode represents a message literal, which is compatible with the +// protobuf text format and can be used for custom options with message types. +// Example: +// +// { foo:1 foo:2 foo:3 bar: } +type MessageLiteralNode struct { + compositeNode + Open *RuneNode // should be '{' or '<' + Elements []*MessageFieldNode + // Separator characters between elements, which can be either ',' + // or ';' if present. This slice must be exactly len(Elements) in + // length, with each item in Elements having one corresponding item + // in Seps. Separators in message literals are optional, so a given + // item in this slice may be nil to indicate absence of a separator. + Seps []*RuneNode + Close *RuneNode // should be '}' or '>', depending on Open +} + +// NewMessageLiteralNode creates a new *MessageLiteralNode. The openSym and +// closeSym runes must not be nil and should be "{" and "}" or "<" and ">". +// +// Unlike separators (dots and commas) used for other AST nodes that represent +// a list of elements, the seps arg must be the SAME length as vals, and it may +// contain nil values to indicate absence of a separator (in fact, it could be +// all nils). +func NewMessageLiteralNode(openSym *RuneNode, vals []*MessageFieldNode, seps []*RuneNode, closeSym *RuneNode) *MessageLiteralNode { + if openSym == nil { + panic("openSym is nil") + } + if closeSym == nil { + panic("closeSym is nil") + } + if len(seps) != len(vals) { + panic(fmt.Sprintf("%d vals requires %d commas, not %d", len(vals), len(vals), len(seps))) + } + numChildren := len(vals) + 2 + for _, sep := range seps { + if sep != nil { + numChildren++ + } + } + children := make([]Node, 0, numChildren) + children = append(children, openSym) + for i, val := range vals { + if val == nil { + panic(fmt.Sprintf("vals[%d] is nil", i)) + } + children = append(children, val) + if seps[i] != nil { + children = append(children, seps[i]) + } + } + children = append(children, closeSym) + + return &MessageLiteralNode{ + compositeNode: compositeNode{ + children: children, + }, + Open: openSym, + Elements: vals, + Seps: seps, + Close: closeSym, + } +} + +func (n *MessageLiteralNode) Value() interface{} { + return n.Elements +} + +// MessageFieldNode represents a single field (name and value) inside of a +// message literal. 
Example: +// +// foo:"bar" +type MessageFieldNode struct { + compositeNode + Name *FieldReferenceNode + // Sep represents the ':' separator between the name and value. If + // the value is a message literal (and thus starts with '<' or '{'), + // then the separator is optional, and thus may be nil. + Sep *RuneNode + Val ValueNode +} + +// NewMessageFieldNode creates a new *MessageFieldNode. All args except sep +// must be non-nil. +func NewMessageFieldNode(name *FieldReferenceNode, sep *RuneNode, val ValueNode) *MessageFieldNode { + if name == nil { + panic("name is nil") + } + if val == nil { + panic("val is nil") + } + numChildren := 2 + if sep != nil { + numChildren++ + } + children := make([]Node, 0, numChildren) + children = append(children, name) + if sep != nil { + children = append(children, sep) + } + children = append(children, val) + + return &MessageFieldNode{ + compositeNode: compositeNode{ + children: children, + }, + Name: name, + Sep: sep, + Val: val, + } +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/walk.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/walk.go new file mode 100644 index 00000000000..53301946b6a --- /dev/null +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/ast/walk.go @@ -0,0 +1,492 @@ +package ast + +// VisitFunc is used to examine a node in the AST when walking the tree. +// It returns true or false as to whether or not the descendants of the +// given node should be visited. If it returns true, the node's children +// will be visited; if false, they will not. When returning true, it +// can also return a new VisitFunc to use for the children. If it returns +// (true, nil), then the current function will be re-used when visiting +// the children. +// +// See also the Visitor type. +type VisitFunc func(Node) (bool, VisitFunc) + +// Walk conducts a walk of the AST rooted at the given root using the +// given function. It performs a "pre-order traversal", visiting a +// given AST node before it visits that node's descendants. +func Walk(root Node, v VisitFunc) { + ok, next := v(root) + if !ok { + return + } + if next != nil { + v = next + } + if comp, ok := root.(CompositeNode); ok { + for _, child := range comp.Children() { + Walk(child, v) + } + } +} + +// Visitor provides a technique for walking the AST that allows for +// dynamic dispatch, where a particular function is invoked based on +// the runtime type of the argument. +// +// It consists of a number of functions, each of which matches a +// concrete Node type. It also includes functions for sub-interfaces +// of Node and the Node interface itself, to be used as broader +// "catch all" functions. +// +// To use a visitor, provide a function for the node types of +// interest and pass visitor.Visit as the function to a Walk operation. +// When a node is traversed, the corresponding function field of +// the visitor is invoked, if not nil. If the function for a node's +// concrete type is nil/absent but the function for an interface it +// implements is present, that interface visit function will be used +// instead. If no matching function is present, the traversal will +// continue. If a matching function is present, it will be invoked +// and its response determines how the traversal proceeds. +// +// Every visit function returns (bool, *Visitor). If the bool returned +// is false, the visited node's descendants are skipped. Otherwise, +// traversal will continue into the node's children.
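+//
+// For example, a minimal sketch that collects the names of all messages in a
+// file (assuming root is a previously parsed *FileNode); returning (true, nil)
+// from the callback keeps walking with the same visitor:
+//
+//	var names []string
+//	v := &Visitor{
+//		VisitMessageNode: func(m *MessageNode) (bool, *Visitor) {
+//			names = append(names, m.Name.Val)
+//			return true, nil
+//		},
+//	}
+//	Walk(root, v.Visit)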
If the returned +// visitor is nil, the current visitor will continue to be used. But +// if a non-nil visitor is returned, it will be used to visit the +// node's children. +type Visitor struct { + // VisitFileNode is invoked when visiting a *FileNode in the AST. + VisitFileNode func(*FileNode) (bool, *Visitor) + // VisitSyntaxNode is invoked when visiting a *SyntaxNode in the AST. + VisitSyntaxNode func(*SyntaxNode) (bool, *Visitor) + // VisitPackageNode is invoked when visiting a *PackageNode in the AST. + VisitPackageNode func(*PackageNode) (bool, *Visitor) + // VisitImportNode is invoked when visiting an *ImportNode in the AST. + VisitImportNode func(*ImportNode) (bool, *Visitor) + // VisitOptionNode is invoked when visiting an *OptionNode in the AST. + VisitOptionNode func(*OptionNode) (bool, *Visitor) + // VisitOptionNameNode is invoked when visiting an *OptionNameNode in the AST. + VisitOptionNameNode func(*OptionNameNode) (bool, *Visitor) + // VisitFieldReferenceNode is invoked when visiting a *FieldReferenceNode in the AST. + VisitFieldReferenceNode func(*FieldReferenceNode) (bool, *Visitor) + // VisitCompactOptionsNode is invoked when visiting a *CompactOptionsNode in the AST. + VisitCompactOptionsNode func(*CompactOptionsNode) (bool, *Visitor) + // VisitMessageNode is invoked when visiting a *MessageNode in the AST. + VisitMessageNode func(*MessageNode) (bool, *Visitor) + // VisitExtendNode is invoked when visiting an *ExtendNode in the AST. + VisitExtendNode func(*ExtendNode) (bool, *Visitor) + // VisitExtensionRangeNode is invoked when visiting an *ExtensionRangeNode in the AST. + VisitExtensionRangeNode func(*ExtensionRangeNode) (bool, *Visitor) + // VisitReservedNode is invoked when visiting a *ReservedNode in the AST. + VisitReservedNode func(*ReservedNode) (bool, *Visitor) + // VisitRangeNode is invoked when visiting a *RangeNode in the AST. + VisitRangeNode func(*RangeNode) (bool, *Visitor) + // VisitFieldNode is invoked when visiting a *FieldNode in the AST. + VisitFieldNode func(*FieldNode) (bool, *Visitor) + // VisitGroupNode is invoked when visiting a *GroupNode in the AST. + VisitGroupNode func(*GroupNode) (bool, *Visitor) + // VisitMapFieldNode is invoked when visiting a *MapFieldNode in the AST. + VisitMapFieldNode func(*MapFieldNode) (bool, *Visitor) + // VisitMapTypeNode is invoked when visiting a *MapTypeNode in the AST. + VisitMapTypeNode func(*MapTypeNode) (bool, *Visitor) + // VisitOneOfNode is invoked when visiting a *OneOfNode in the AST. + VisitOneOfNode func(*OneOfNode) (bool, *Visitor) + // VisitEnumNode is invoked when visiting an *EnumNode in the AST. + VisitEnumNode func(*EnumNode) (bool, *Visitor) + // VisitEnumValueNode is invoked when visiting an *EnumValueNode in the AST. + VisitEnumValueNode func(*EnumValueNode) (bool, *Visitor) + // VisitServiceNode is invoked when visiting a *ServiceNode in the AST. + VisitServiceNode func(*ServiceNode) (bool, *Visitor) + // VisitRPCNode is invoked when visiting an *RPCNode in the AST. + VisitRPCNode func(*RPCNode) (bool, *Visitor) + // VisitRPCTypeNode is invoked when visiting an *RPCTypeNode in the AST. + VisitRPCTypeNode func(*RPCTypeNode) (bool, *Visitor) + // VisitIdentNode is invoked when visiting an *IdentNode in the AST. + VisitIdentNode func(*IdentNode) (bool, *Visitor) + // VisitCompoundIdentNode is invoked when visiting a *CompoundIdentNode in the AST. 
+ VisitCompoundIdentNode func(*CompoundIdentNode) (bool, *Visitor) + // VisitStringLiteralNode is invoked when visiting a *StringLiteralNode in the AST. + VisitStringLiteralNode func(*StringLiteralNode) (bool, *Visitor) + // VisitCompoundStringLiteralNode is invoked when visiting a *CompoundStringLiteralNode in the AST. + VisitCompoundStringLiteralNode func(*CompoundStringLiteralNode) (bool, *Visitor) + // VisitUintLiteralNode is invoked when visiting a *UintLiteralNode in the AST. + VisitUintLiteralNode func(*UintLiteralNode) (bool, *Visitor) + // VisitPositiveUintLiteralNode is invoked when visiting a *PositiveUintLiteralNode in the AST. + VisitPositiveUintLiteralNode func(*PositiveUintLiteralNode) (bool, *Visitor) + // VisitNegativeIntLiteralNode is invoked when visiting a *NegativeIntLiteralNode in the AST. + VisitNegativeIntLiteralNode func(*NegativeIntLiteralNode) (bool, *Visitor) + // VisitFloatLiteralNode is invoked when visiting a *FloatLiteralNode in the AST. + VisitFloatLiteralNode func(*FloatLiteralNode) (bool, *Visitor) + // VisitSpecialFloatLiteralNode is invoked when visiting a *SpecialFloatLiteralNode in the AST. + VisitSpecialFloatLiteralNode func(*SpecialFloatLiteralNode) (bool, *Visitor) + // VisitSignedFloatLiteralNode is invoked when visiting a *SignedFloatLiteralNode in the AST. + VisitSignedFloatLiteralNode func(*SignedFloatLiteralNode) (bool, *Visitor) + // VisitBoolLiteralNode is invoked when visiting a *BoolLiteralNode in the AST. + VisitBoolLiteralNode func(*BoolLiteralNode) (bool, *Visitor) + // VisitArrayLiteralNode is invoked when visiting an *ArrayLiteralNode in the AST. + VisitArrayLiteralNode func(*ArrayLiteralNode) (bool, *Visitor) + // VisitMessageLiteralNode is invoked when visiting a *MessageLiteralNode in the AST. + VisitMessageLiteralNode func(*MessageLiteralNode) (bool, *Visitor) + // VisitMessageFieldNode is invoked when visiting a *MessageFieldNode in the AST. + VisitMessageFieldNode func(*MessageFieldNode) (bool, *Visitor) + // VisitKeywordNode is invoked when visiting a *KeywordNode in the AST. + VisitKeywordNode func(*KeywordNode) (bool, *Visitor) + // VisitRuneNode is invoked when visiting a *RuneNode in the AST. + VisitRuneNode func(*RuneNode) (bool, *Visitor) + // VisitEmptyDeclNode is invoked when visiting a *EmptyDeclNode in the AST. + VisitEmptyDeclNode func(*EmptyDeclNode) (bool, *Visitor) + + // VisitFieldDeclNode is invoked when visiting a FieldDeclNode in the AST. + // This function is used when no concrete type function is provided. If + // both this and VisitMessageDeclNode are provided, and a node implements + // both (such as *GroupNode and *MapFieldNode), this function will be + // invoked and not the other. + VisitFieldDeclNode func(FieldDeclNode) (bool, *Visitor) + // VisitMessageDeclNode is invoked when visiting a MessageDeclNode in the AST. + // This function is used when no concrete type function is provided. + VisitMessageDeclNode func(MessageDeclNode) (bool, *Visitor) + + // VisitIdentValueNode is invoked when visiting an IdentValueNode in the AST. + // This function is used when no concrete type function is provided. + VisitIdentValueNode func(IdentValueNode) (bool, *Visitor) + // VisitStringValueNode is invoked when visiting a StringValueNode in the AST. + // This function is used when no concrete type function is provided. + VisitStringValueNode func(StringValueNode) (bool, *Visitor) + // VisitIntValueNode is invoked when visiting an IntValueNode in the AST. 
+ // This function is used when no concrete type function is provided. If + // both this and VisitFloatValueNode are provided, and a node implements + // both (such as *UintLiteralNode), this function will be invoked and + // not the other. + VisitIntValueNode func(IntValueNode) (bool, *Visitor) + // VisitFloatValueNode is invoked when visiting a FloatValueNode in the AST. + // This function is used when no concrete type function is provided. + VisitFloatValueNode func(FloatValueNode) (bool, *Visitor) + // VisitValueNode is invoked when visiting a ValueNode in the AST. This + // function is used when no concrete type function is provided and no + // more specific ValueNode function is provided that matches the node. + VisitValueNode func(ValueNode) (bool, *Visitor) + + // VisitTerminalNode is invoked when visiting a TerminalNode in the AST. + // This function is used when no concrete type function is provided + // no more specific interface type function is provided. + VisitTerminalNode func(TerminalNode) (bool, *Visitor) + // VisitCompositeNode is invoked when visiting a CompositeNode in the AST. + // This function is used when no concrete type function is provided + // no more specific interface type function is provided. + VisitCompositeNode func(CompositeNode) (bool, *Visitor) + // VisitNode is invoked when visiting a Node in the AST. This + // function is only used when no other more specific function is + // provided. + VisitNode func(Node) (bool, *Visitor) +} + +// Visit provides the Visitor's implementation of VisitFunc, to be +// used with Walk operations. +func (v *Visitor) Visit(n Node) (bool, VisitFunc) { + var ok, matched bool + var next *Visitor + switch n := n.(type) { + case *FileNode: + if v.VisitFileNode != nil { + matched = true + ok, next = v.VisitFileNode(n) + } + case *SyntaxNode: + if v.VisitSyntaxNode != nil { + matched = true + ok, next = v.VisitSyntaxNode(n) + } + case *PackageNode: + if v.VisitPackageNode != nil { + matched = true + ok, next = v.VisitPackageNode(n) + } + case *ImportNode: + if v.VisitImportNode != nil { + matched = true + ok, next = v.VisitImportNode(n) + } + case *OptionNode: + if v.VisitOptionNode != nil { + matched = true + ok, next = v.VisitOptionNode(n) + } + case *OptionNameNode: + if v.VisitOptionNameNode != nil { + matched = true + ok, next = v.VisitOptionNameNode(n) + } + case *FieldReferenceNode: + if v.VisitFieldReferenceNode != nil { + matched = true + ok, next = v.VisitFieldReferenceNode(n) + } + case *CompactOptionsNode: + if v.VisitCompactOptionsNode != nil { + matched = true + ok, next = v.VisitCompactOptionsNode(n) + } + case *MessageNode: + if v.VisitMessageNode != nil { + matched = true + ok, next = v.VisitMessageNode(n) + } + case *ExtendNode: + if v.VisitExtendNode != nil { + matched = true + ok, next = v.VisitExtendNode(n) + } + case *ExtensionRangeNode: + if v.VisitExtensionRangeNode != nil { + matched = true + ok, next = v.VisitExtensionRangeNode(n) + } + case *ReservedNode: + if v.VisitReservedNode != nil { + matched = true + ok, next = v.VisitReservedNode(n) + } + case *RangeNode: + if v.VisitRangeNode != nil { + matched = true + ok, next = v.VisitRangeNode(n) + } + case *FieldNode: + if v.VisitFieldNode != nil { + matched = true + ok, next = v.VisitFieldNode(n) + } + case *GroupNode: + if v.VisitGroupNode != nil { + matched = true + ok, next = v.VisitGroupNode(n) + } + case *MapFieldNode: + if v.VisitMapFieldNode != nil { + matched = true + ok, next = v.VisitMapFieldNode(n) + } + case *MapTypeNode: + if 
v.VisitMapTypeNode != nil { + matched = true + ok, next = v.VisitMapTypeNode(n) + } + case *OneOfNode: + if v.VisitOneOfNode != nil { + matched = true + ok, next = v.VisitOneOfNode(n) + } + case *EnumNode: + if v.VisitEnumNode != nil { + matched = true + ok, next = v.VisitEnumNode(n) + } + case *EnumValueNode: + if v.VisitEnumValueNode != nil { + matched = true + ok, next = v.VisitEnumValueNode(n) + } + case *ServiceNode: + if v.VisitServiceNode != nil { + matched = true + ok, next = v.VisitServiceNode(n) + } + case *RPCNode: + if v.VisitRPCNode != nil { + matched = true + ok, next = v.VisitRPCNode(n) + } + case *RPCTypeNode: + if v.VisitRPCTypeNode != nil { + matched = true + ok, next = v.VisitRPCTypeNode(n) + } + case *IdentNode: + if v.VisitIdentNode != nil { + matched = true + ok, next = v.VisitIdentNode(n) + } + case *CompoundIdentNode: + if v.VisitCompoundIdentNode != nil { + matched = true + ok, next = v.VisitCompoundIdentNode(n) + } + case *StringLiteralNode: + if v.VisitStringLiteralNode != nil { + matched = true + ok, next = v.VisitStringLiteralNode(n) + } + case *CompoundStringLiteralNode: + if v.VisitCompoundStringLiteralNode != nil { + matched = true + ok, next = v.VisitCompoundStringLiteralNode(n) + } + case *UintLiteralNode: + if v.VisitUintLiteralNode != nil { + matched = true + ok, next = v.VisitUintLiteralNode(n) + } + case *PositiveUintLiteralNode: + if v.VisitPositiveUintLiteralNode != nil { + matched = true + ok, next = v.VisitPositiveUintLiteralNode(n) + } + case *NegativeIntLiteralNode: + if v.VisitNegativeIntLiteralNode != nil { + matched = true + ok, next = v.VisitNegativeIntLiteralNode(n) + } + case *FloatLiteralNode: + if v.VisitFloatLiteralNode != nil { + matched = true + ok, next = v.VisitFloatLiteralNode(n) + } + case *SpecialFloatLiteralNode: + if v.VisitSpecialFloatLiteralNode != nil { + matched = true + ok, next = v.VisitSpecialFloatLiteralNode(n) + } + case *SignedFloatLiteralNode: + if v.VisitSignedFloatLiteralNode != nil { + matched = true + ok, next = v.VisitSignedFloatLiteralNode(n) + } + case *BoolLiteralNode: + if v.VisitBoolLiteralNode != nil { + matched = true + ok, next = v.VisitBoolLiteralNode(n) + } + case *ArrayLiteralNode: + if v.VisitArrayLiteralNode != nil { + matched = true + ok, next = v.VisitArrayLiteralNode(n) + } + case *MessageLiteralNode: + if v.VisitMessageLiteralNode != nil { + matched = true + ok, next = v.VisitMessageLiteralNode(n) + } + case *MessageFieldNode: + if v.VisitMessageFieldNode != nil { + matched = true + ok, next = v.VisitMessageFieldNode(n) + } + case *KeywordNode: + if v.VisitKeywordNode != nil { + matched = true + ok, next = v.VisitKeywordNode(n) + } + case *RuneNode: + if v.VisitRuneNode != nil { + matched = true + ok, next = v.VisitRuneNode(n) + } + case *EmptyDeclNode: + if v.VisitEmptyDeclNode != nil { + matched = true + ok, next = v.VisitEmptyDeclNode(n) + } + } + + if !matched { + // Visitor provided no concrete type visit function, so + // check interface types. We do this in several passes + // to provide "priority" for matched interfaces for nodes + // that actually implement more than one interface. + // + // For example, StringLiteralNode implements both + // StringValueNode and ValueNode. Both cases could match + // so the first case is what would match. So if we want + // to test against either, they need to be in different + // switch statements. 
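+ // The passes below go from most specific to least specific: the
+ // declaration and value sub-interfaces first, then ValueNode, then
+ // TerminalNode/CompositeNode, and finally the catch-all VisitNode.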
+ switch n := n.(type) { + case FieldDeclNode: + if v.VisitFieldDeclNode != nil { + matched = true + ok, next = v.VisitFieldDeclNode(n) + } + case IdentValueNode: + if v.VisitIdentValueNode != nil { + matched = true + ok, next = v.VisitIdentValueNode(n) + } + case StringValueNode: + if v.VisitStringValueNode != nil { + matched = true + ok, next = v.VisitStringValueNode(n) + } + case IntValueNode: + if v.VisitIntValueNode != nil { + matched = true + ok, next = v.VisitIntValueNode(n) + } + } + } + + if !matched { + // These two are excluded from the above switch so that + // if visitor provides both VisitIntValueNode and + // VisitFloatValueNode, we'll prefer VisitIntValueNode + // for *UintLiteralNode (which implements both). Similarly, + // that way we prefer VisitFieldDeclNode over + // VisitMessageDeclNode when visiting a *GroupNode. + switch n := n.(type) { + case FloatValueNode: + if v.VisitFloatValueNode != nil { + matched = true + ok, next = v.VisitFloatValueNode(n) + } + case MessageDeclNode: + if v.VisitMessageDeclNode != nil { + matched = true + ok, next = v.VisitMessageDeclNode(n) + } + } + } + + if !matched { + switch n := n.(type) { + case ValueNode: + if v.VisitValueNode != nil { + matched = true + ok, next = v.VisitValueNode(n) + } + } + } + + if !matched { + switch n := n.(type) { + case TerminalNode: + if v.VisitTerminalNode != nil { + matched = true + ok, next = v.VisitTerminalNode(n) + } + case CompositeNode: + if v.VisitCompositeNode != nil { + matched = true + ok, next = v.VisitCompositeNode(n) + } + } + } + + if !matched { + // finally, fallback to most generic visit function + if v.VisitNode != nil { + matched = true + ok, next = v.VisitNode(n) + } + } + + if !matched { + // keep descending with the current visitor + return true, nil + } + + if !ok { + return false, nil + } + if next != nil { + return true, next.Visit + } + return true, v.Visit +} diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/descriptor_protos.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/descriptor_protos.go index 08de8d57a68..41134541d7d 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/descriptor_protos.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/descriptor_protos.go @@ -10,69 +10,70 @@ import ( dpb "github.com/golang/protobuf/protoc-gen-go/descriptor" "github.com/jhump/protoreflect/desc/internal" + "github.com/jhump/protoreflect/desc/protoparse/ast" ) -func (r *parseResult) createFileDescriptor(filename string, file *fileNode) { +func (r *parseResult) createFileDescriptor(filename string, file *ast.FileNode) { fd := &dpb.FileDescriptorProto{Name: proto.String(filename)} r.fd = fd r.putFileNode(fd, file) isProto3 := false - if file.syntax != nil { - if file.syntax.syntax.val == "proto3" { + if file.Syntax != nil { + if file.Syntax.Syntax.AsString() == "proto3" { isProto3 = true - } else if file.syntax.syntax.val != "proto2" { - if r.errs.handleErrorWithPos(file.syntax.syntax.start(), `syntax value must be "proto2" or "proto3"`) != nil { + } else if file.Syntax.Syntax.AsString() != "proto2" { + if r.errs.handleErrorWithPos(file.Syntax.Syntax.Start(), `syntax value must be "proto2" or "proto3"`) != nil { return } } // proto2 is the default, so no need to set unless proto3 if isProto3 { - fd.Syntax = proto.String(file.syntax.syntax.val) + fd.Syntax = proto.String(file.Syntax.Syntax.AsString()) } } else { - r.errs.warn(file.start(), ErrNoSyntax) + r.errs.warn(file.Start(), ErrNoSyntax) } - for _, decl := range file.decls { + for _, decl 
:= range file.Decls { if r.errs.err != nil { return } - if decl.enum != nil { - fd.EnumType = append(fd.EnumType, r.asEnumDescriptor(decl.enum)) - } else if decl.extend != nil { - r.addExtensions(decl.extend, &fd.Extension, &fd.MessageType, isProto3) - } else if decl.imp != nil { - file.imports = append(file.imports, decl.imp) + switch decl := decl.(type) { + case *ast.EnumNode: + fd.EnumType = append(fd.EnumType, r.asEnumDescriptor(decl)) + case *ast.ExtendNode: + r.addExtensions(decl, &fd.Extension, &fd.MessageType, isProto3) + case *ast.ImportNode: index := len(fd.Dependency) - fd.Dependency = append(fd.Dependency, decl.imp.name.val) - if decl.imp.public { + fd.Dependency = append(fd.Dependency, decl.Name.AsString()) + if decl.Public != nil { fd.PublicDependency = append(fd.PublicDependency, int32(index)) - } else if decl.imp.weak { + } else if decl.Weak != nil { fd.WeakDependency = append(fd.WeakDependency, int32(index)) } - } else if decl.message != nil { - fd.MessageType = append(fd.MessageType, r.asMessageDescriptor(decl.message, isProto3)) - } else if decl.option != nil { + case *ast.MessageNode: + fd.MessageType = append(fd.MessageType, r.asMessageDescriptor(decl, isProto3)) + case *ast.OptionNode: if fd.Options == nil { fd.Options = &dpb.FileOptions{} } - fd.Options.UninterpretedOption = append(fd.Options.UninterpretedOption, r.asUninterpretedOption(decl.option)) - } else if decl.service != nil { - fd.Service = append(fd.Service, r.asServiceDescriptor(decl.service)) - } else if decl.pkg != nil { + fd.Options.UninterpretedOption = append(fd.Options.UninterpretedOption, r.asUninterpretedOption(decl)) + case *ast.ServiceNode: + fd.Service = append(fd.Service, r.asServiceDescriptor(decl)) + case *ast.PackageNode: if fd.Package != nil { - if r.errs.handleErrorWithPos(decl.pkg.start(), "files should have only one package declaration") != nil { + if r.errs.handleErrorWithPos(decl.Start(), "files should have only one package declaration") != nil { return } } - fd.Package = proto.String(decl.pkg.name.val) + fd.Package = proto.String(string(decl.Name.AsIdentifier())) } } } -func (r *parseResult) asUninterpretedOptions(nodes []*optionNode) []*dpb.UninterpretedOption { +func (r *parseResult) asUninterpretedOptions(nodes []*ast.OptionNode) []*dpb.UninterpretedOption { if len(nodes) == 0 { return nil } @@ -83,11 +84,11 @@ func (r *parseResult) asUninterpretedOptions(nodes []*optionNode) []*dpb.Uninter return opts } -func (r *parseResult) asUninterpretedOption(node *optionNode) *dpb.UninterpretedOption { - opt := &dpb.UninterpretedOption{Name: r.asUninterpretedOptionName(node.name.parts)} +func (r *parseResult) asUninterpretedOption(node *ast.OptionNode) *dpb.UninterpretedOption { + opt := &dpb.UninterpretedOption{Name: r.asUninterpretedOptionName(node.Name.Parts)} r.putOptionNode(opt, node) - switch val := node.val.value().(type) { + switch val := node.Val.Value().(type) { case bool: if val { opt.IdentifierValue = proto.String("true") @@ -102,27 +103,24 @@ func (r *parseResult) asUninterpretedOption(node *optionNode) *dpb.Uninterpreted opt.DoubleValue = proto.Float64(val) case string: opt.StringValue = []byte(val) - case identifier: + case ast.Identifier: opt.IdentifierValue = proto.String(string(val)) - case []*aggregateEntryNode: + case []*ast.MessageFieldNode: var buf bytes.Buffer aggToString(val, &buf) aggStr := buf.String() opt.AggregateValue = proto.String(aggStr) + //the grammar does not allow arrays here, so no case for []ast.ValueNode } return opt } -func (r *parseResult) 
asUninterpretedOptionName(parts []*optionNamePartNode) []*dpb.UninterpretedOption_NamePart { +func (r *parseResult) asUninterpretedOptionName(parts []*ast.FieldReferenceNode) []*dpb.UninterpretedOption_NamePart { ret := make([]*dpb.UninterpretedOption_NamePart, len(parts)) for i, part := range parts { - txt := part.text.val - if !part.isExtension { - txt = part.text.val[part.offset : part.offset+part.length] - } np := &dpb.UninterpretedOption_NamePart{ - NamePart: proto.String(txt), - IsExtension: proto.Bool(part.isExtension), + NamePart: proto.String(string(part.Name.AsIdentifier())), + IsExtension: proto.Bool(part.IsExtension()), } r.putOptionNamePartNode(np, part) ret[i] = np @@ -130,54 +128,53 @@ func (r *parseResult) asUninterpretedOptionName(parts []*optionNamePartNode) []* return ret } -func (r *parseResult) addExtensions(ext *extendNode, flds *[]*dpb.FieldDescriptorProto, msgs *[]*dpb.DescriptorProto, isProto3 bool) { - extendee := ext.extendee.val +func (r *parseResult) addExtensions(ext *ast.ExtendNode, flds *[]*dpb.FieldDescriptorProto, msgs *[]*dpb.DescriptorProto, isProto3 bool) { + extendee := string(ext.Extendee.AsIdentifier()) count := 0 - for _, decl := range ext.decls { - if decl.field != nil { + for _, decl := range ext.Decls { + switch decl := decl.(type) { + case *ast.FieldNode: count++ - decl.field.extendee = ext // use higher limit since we don't know yet whether extendee is messageset wire format - fd := r.asFieldDescriptor(decl.field, internal.MaxTag, isProto3) + fd := r.asFieldDescriptor(decl, internal.MaxTag, isProto3) fd.Extendee = proto.String(extendee) *flds = append(*flds, fd) - } else if decl.group != nil { + case *ast.GroupNode: count++ - decl.group.extendee = ext // ditto: use higher limit right now - fd, md := r.asGroupDescriptors(decl.group, isProto3, internal.MaxTag) + fd, md := r.asGroupDescriptors(decl, isProto3, internal.MaxTag) fd.Extendee = proto.String(extendee) *flds = append(*flds, fd) *msgs = append(*msgs, md) } } if count == 0 { - _ = r.errs.handleErrorWithPos(ext.start(), "extend sections must define at least one extension") + _ = r.errs.handleErrorWithPos(ext.Start(), "extend sections must define at least one extension") } } -func asLabel(lbl *fieldLabel) *dpb.FieldDescriptorProto_Label { - if lbl.identNode == nil { +func asLabel(lbl *ast.FieldLabel) *dpb.FieldDescriptorProto_Label { + if !lbl.IsPresent() { return nil } switch { - case lbl.repeated: + case lbl.Repeated: return dpb.FieldDescriptorProto_LABEL_REPEATED.Enum() - case lbl.required: + case lbl.Required: return dpb.FieldDescriptorProto_LABEL_REQUIRED.Enum() default: return dpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum() } } -func (r *parseResult) asFieldDescriptor(node *fieldNode, maxTag int32, isProto3 bool) *dpb.FieldDescriptorProto { - tag := node.tag.val - if err := checkTag(node.tag.start(), tag, maxTag); err != nil { +func (r *parseResult) asFieldDescriptor(node *ast.FieldNode, maxTag int32, isProto3 bool) *dpb.FieldDescriptorProto { + tag := node.Tag.Val + if err := checkTag(node.Tag.Start(), tag, maxTag); err != nil { _ = r.errs.handleError(err) } - fd := newFieldDescriptor(node.name.val, node.fldType.val, int32(tag), asLabel(&node.label)) + fd := newFieldDescriptor(node.Name.Val, string(node.FldType.AsIdentifier()), int32(tag), asLabel(&node.Label)) r.putFieldNode(fd, node) - if opts := node.options.Elements(); len(opts) > 0 { + if opts := node.Options.GetElements(); len(opts) > 0 { fd.Options = &dpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(opts)} } if 
isProto3 && fd.Label != nil && fd.GetLabel() == dpb.FieldDescriptorProto_LABEL_OPTIONAL { @@ -223,49 +220,49 @@ func newFieldDescriptor(name string, fieldType string, tag int32, lbl *dpb.Field return fd } -func (r *parseResult) asGroupDescriptors(group *groupNode, isProto3 bool, maxTag int32) (*dpb.FieldDescriptorProto, *dpb.DescriptorProto) { - tag := group.tag.val - if err := checkTag(group.tag.start(), tag, maxTag); err != nil { +func (r *parseResult) asGroupDescriptors(group *ast.GroupNode, isProto3 bool, maxTag int32) (*dpb.FieldDescriptorProto, *dpb.DescriptorProto) { + tag := group.Tag.Val + if err := checkTag(group.Tag.Start(), tag, maxTag); err != nil { _ = r.errs.handleError(err) } - if !unicode.IsUpper(rune(group.name.val[0])) { - _ = r.errs.handleErrorWithPos(group.name.start(), "group %s should have a name that starts with a capital letter", group.name.val) + if !unicode.IsUpper(rune(group.Name.Val[0])) { + _ = r.errs.handleErrorWithPos(group.Name.Start(), "group %s should have a name that starts with a capital letter", group.Name.Val) } - fieldName := strings.ToLower(group.name.val) + fieldName := strings.ToLower(group.Name.Val) fd := &dpb.FieldDescriptorProto{ Name: proto.String(fieldName), JsonName: proto.String(internal.JsonName(fieldName)), Number: proto.Int32(int32(tag)), - Label: asLabel(&group.label), + Label: asLabel(&group.Label), Type: dpb.FieldDescriptorProto_TYPE_GROUP.Enum(), - TypeName: proto.String(group.name.val), + TypeName: proto.String(group.Name.Val), } r.putFieldNode(fd, group) - if opts := group.options.Elements(); len(opts) > 0 { + if opts := group.Options.GetElements(); len(opts) > 0 { fd.Options = &dpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(opts)} } - md := &dpb.DescriptorProto{Name: proto.String(group.name.val)} + md := &dpb.DescriptorProto{Name: proto.String(group.Name.Val)} r.putMessageNode(md, group) - r.addMessageDecls(md, group.decls, isProto3) + r.addMessageBody(md, &group.MessageBody, isProto3) return fd, md } -func (r *parseResult) asMapDescriptors(mapField *mapFieldNode, isProto3 bool, maxTag int32) (*dpb.FieldDescriptorProto, *dpb.DescriptorProto) { - tag := mapField.tag.val - if err := checkTag(mapField.tag.start(), tag, maxTag); err != nil { +func (r *parseResult) asMapDescriptors(mapField *ast.MapFieldNode, isProto3 bool, maxTag int32) (*dpb.FieldDescriptorProto, *dpb.DescriptorProto) { + tag := mapField.Tag.Val + if err := checkTag(mapField.Tag.Start(), tag, maxTag); err != nil { _ = r.errs.handleError(err) } var lbl *dpb.FieldDescriptorProto_Label if !isProto3 { lbl = dpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum() } - keyFd := newFieldDescriptor("key", mapField.mapType.keyType.val, 1, lbl) - r.putFieldNode(keyFd, mapField.keyField()) - valFd := newFieldDescriptor("value", mapField.mapType.valueType.val, 2, lbl) - r.putFieldNode(valFd, mapField.valueField()) - entryName := internal.InitCap(internal.JsonName(mapField.name.val)) + "Entry" - fd := newFieldDescriptor(mapField.name.val, entryName, int32(tag), dpb.FieldDescriptorProto_LABEL_REPEATED.Enum()) - if opts := mapField.options.Elements(); len(opts) > 0 { + keyFd := newFieldDescriptor("key", mapField.MapType.KeyType.Val, 1, lbl) + r.putFieldNode(keyFd, mapField.KeyField()) + valFd := newFieldDescriptor("value", string(mapField.MapType.ValueType.AsIdentifier()), 2, lbl) + r.putFieldNode(valFd, mapField.ValueField()) + entryName := internal.InitCap(internal.JsonName(mapField.Name.Val)) + "Entry" + fd := newFieldDescriptor(mapField.Name.Val, entryName, int32(tag), 
dpb.FieldDescriptorProto_LABEL_REPEATED.Enum()) + if opts := mapField.Options.GetElements(); len(opts) > 0 { fd.Options = &dpb.FieldOptions{UninterpretedOption: r.asUninterpretedOptions(opts)} } r.putFieldNode(fd, mapField) @@ -278,10 +275,10 @@ func (r *parseResult) asMapDescriptors(mapField *mapFieldNode, isProto3 bool, ma return fd, md } -func (r *parseResult) asExtensionRanges(node *extensionRangeNode, maxTag int32) []*dpb.DescriptorProto_ExtensionRange { - opts := r.asUninterpretedOptions(node.options.Elements()) - ers := make([]*dpb.DescriptorProto_ExtensionRange, len(node.ranges)) - for i, rng := range node.ranges { +func (r *parseResult) asExtensionRanges(node *ast.ExtensionRangeNode, maxTag int32) []*dpb.DescriptorProto_ExtensionRange { + opts := r.asUninterpretedOptions(node.Options.GetElements()) + ers := make([]*dpb.DescriptorProto_ExtensionRange, len(node.Ranges)) + for i, rng := range node.Ranges { start, end := getRangeBounds(r, rng, 0, maxTag) er := &dpb.DescriptorProto_ExtensionRange{ Start: proto.Int32(start), @@ -296,58 +293,64 @@ func (r *parseResult) asExtensionRanges(node *extensionRangeNode, maxTag int32) return ers } -func (r *parseResult) asEnumValue(ev *enumValueNode) *dpb.EnumValueDescriptorProto { - num, ok := ev.number.asInt32(math.MinInt32, math.MaxInt32) +func (r *parseResult) asEnumValue(ev *ast.EnumValueNode) *dpb.EnumValueDescriptorProto { + num, ok := ast.AsInt32(ev.Number, math.MinInt32, math.MaxInt32) if !ok { - _ = r.errs.handleErrorWithPos(ev.number.start(), "value %d is out of range: should be between %d and %d", ev.number.value(), math.MinInt32, math.MaxInt32) + _ = r.errs.handleErrorWithPos(ev.Number.Start(), "value %d is out of range: should be between %d and %d", ev.Number.Value(), math.MinInt32, math.MaxInt32) } - evd := &dpb.EnumValueDescriptorProto{Name: proto.String(ev.name.val), Number: proto.Int32(num)} + evd := &dpb.EnumValueDescriptorProto{Name: proto.String(ev.Name.Val), Number: proto.Int32(num)} r.putEnumValueNode(evd, ev) - if opts := ev.options.Elements(); len(opts) > 0 { + if opts := ev.Options.GetElements(); len(opts) > 0 { evd.Options = &dpb.EnumValueOptions{UninterpretedOption: r.asUninterpretedOptions(opts)} } return evd } -func (r *parseResult) asMethodDescriptor(node *methodNode) *dpb.MethodDescriptorProto { +func (r *parseResult) asMethodDescriptor(node *ast.RPCNode) *dpb.MethodDescriptorProto { md := &dpb.MethodDescriptorProto{ - Name: proto.String(node.name.val), - InputType: proto.String(node.input.msgType.val), - OutputType: proto.String(node.output.msgType.val), + Name: proto.String(node.Name.Val), + InputType: proto.String(string(node.Input.MessageType.AsIdentifier())), + OutputType: proto.String(string(node.Output.MessageType.AsIdentifier())), } r.putMethodNode(md, node) - if node.input.streamKeyword != nil { + if node.Input.Stream != nil { md.ClientStreaming = proto.Bool(true) } - if node.output.streamKeyword != nil { + if node.Output.Stream != nil { md.ServerStreaming = proto.Bool(true) } // protoc always adds a MethodOptions if there are brackets - // We have a non-nil node.options if there are brackets // We do the same to match protoc as closely as possible // https://github.com/protocolbuffers/protobuf/blob/0c3f43a6190b77f1f68b7425d1b7e1a8257a8d0c/src/google/protobuf/compiler/parser.cc#L2152 - if node.options != nil { - md.Options = &dpb.MethodOptions{UninterpretedOption: r.asUninterpretedOptions(node.options)} + if node.OpenBrace != nil { + md.Options = &dpb.MethodOptions{} + for _, decl := range node.Decls { + 
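// only option declarations contribute to the method options; other
+ // declaration types in the rpc body are ignored here +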
switch decl := decl.(type) { + case *ast.OptionNode: + md.Options.UninterpretedOption = append(md.Options.UninterpretedOption, r.asUninterpretedOption(decl)) + } + } } return md } -func (r *parseResult) asEnumDescriptor(en *enumNode) *dpb.EnumDescriptorProto { - ed := &dpb.EnumDescriptorProto{Name: proto.String(en.name.val)} +func (r *parseResult) asEnumDescriptor(en *ast.EnumNode) *dpb.EnumDescriptorProto { + ed := &dpb.EnumDescriptorProto{Name: proto.String(en.Name.Val)} r.putEnumNode(ed, en) - for _, decl := range en.decls { - if decl.option != nil { + for _, decl := range en.Decls { + switch decl := decl.(type) { + case *ast.OptionNode: if ed.Options == nil { ed.Options = &dpb.EnumOptions{} } - ed.Options.UninterpretedOption = append(ed.Options.UninterpretedOption, r.asUninterpretedOption(decl.option)) - } else if decl.value != nil { - ed.Value = append(ed.Value, r.asEnumValue(decl.value)) - } else if decl.reserved != nil { - for _, n := range decl.reserved.names { - ed.ReservedName = append(ed.ReservedName, n.val) + ed.Options.UninterpretedOption = append(ed.Options.UninterpretedOption, r.asUninterpretedOption(decl)) + case *ast.EnumValueNode: + ed.Value = append(ed.Value, r.asEnumValue(decl)) + case *ast.ReservedNode: + for _, n := range decl.Names { + ed.ReservedName = append(ed.ReservedName, n.AsString()) } - for _, rng := range decl.reserved.ranges { + for _, rng := range decl.Ranges { ed.ReservedRange = append(ed.ReservedRange, r.asEnumReservedRange(rng)) } } @@ -355,7 +358,7 @@ func (r *parseResult) asEnumDescriptor(en *enumNode) *dpb.EnumDescriptorProto { return ed } -func (r *parseResult) asEnumReservedRange(rng *rangeNode) *dpb.EnumDescriptorProto_EnumReservedRange { +func (r *parseResult) asEnumReservedRange(rng *ast.RangeNode) *dpb.EnumDescriptorProto_EnumReservedRange { start, end := getRangeBounds(r, rng, math.MinInt32, math.MaxInt32) rr := &dpb.EnumDescriptorProto_EnumReservedRange{ Start: proto.Int32(start), @@ -365,73 +368,76 @@ func (r *parseResult) asEnumReservedRange(rng *rangeNode) *dpb.EnumDescriptorPro return rr } -func (r *parseResult) asMessageDescriptor(node *messageNode, isProto3 bool) *dpb.DescriptorProto { - msgd := &dpb.DescriptorProto{Name: proto.String(node.name.val)} +func (r *parseResult) asMessageDescriptor(node *ast.MessageNode, isProto3 bool) *dpb.DescriptorProto { + msgd := &dpb.DescriptorProto{Name: proto.String(node.Name.Val)} r.putMessageNode(msgd, node) - r.addMessageDecls(msgd, node.decls, isProto3) + r.addMessageBody(msgd, &node.MessageBody, isProto3) return msgd } -func (r *parseResult) addMessageDecls(msgd *dpb.DescriptorProto, decls []*messageElement, isProto3 bool) { +func (r *parseResult) addMessageBody(msgd *dpb.DescriptorProto, body *ast.MessageBody, isProto3 bool) { // first process any options - for _, decl := range decls { - if decl.option != nil { + for _, decl := range body.Decls { + if opt, ok := decl.(*ast.OptionNode); ok { if msgd.Options == nil { msgd.Options = &dpb.MessageOptions{} } - msgd.Options.UninterpretedOption = append(msgd.Options.UninterpretedOption, r.asUninterpretedOption(decl.option)) + msgd.Options.UninterpretedOption = append(msgd.Options.UninterpretedOption, r.asUninterpretedOption(opt)) } } // now that we have options, we can see if this uses messageset wire format, which // impacts how we validate tag numbers in any fields in the message maxTag := int32(internal.MaxNormalTag) - if isMessageSet, err := isMessageSetWireFormat(r, "message "+msgd.GetName(), msgd); err != nil { + messageSetOpt, err := 
isMessageSetWireFormat(r, "message "+msgd.GetName(), msgd) + if err != nil { return - } else if isMessageSet { + } else if messageSetOpt != nil { maxTag = internal.MaxTag // higher limit for messageset wire format } rsvdNames := map[string]int{} // now we can process the rest - for _, decl := range decls { - if decl.enum != nil { - msgd.EnumType = append(msgd.EnumType, r.asEnumDescriptor(decl.enum)) - } else if decl.extend != nil { - r.addExtensions(decl.extend, &msgd.Extension, &msgd.NestedType, isProto3) - } else if decl.extensionRange != nil { - msgd.ExtensionRange = append(msgd.ExtensionRange, r.asExtensionRanges(decl.extensionRange, maxTag)...) - } else if decl.field != nil { - fd := r.asFieldDescriptor(decl.field, maxTag, isProto3) + for _, decl := range body.Decls { + switch decl := decl.(type) { + case *ast.EnumNode: + msgd.EnumType = append(msgd.EnumType, r.asEnumDescriptor(decl)) + case *ast.ExtendNode: + r.addExtensions(decl, &msgd.Extension, &msgd.NestedType, isProto3) + case *ast.ExtensionRangeNode: + msgd.ExtensionRange = append(msgd.ExtensionRange, r.asExtensionRanges(decl, maxTag)...) + case *ast.FieldNode: + fd := r.asFieldDescriptor(decl, maxTag, isProto3) msgd.Field = append(msgd.Field, fd) - } else if decl.mapField != nil { - fd, md := r.asMapDescriptors(decl.mapField, isProto3, maxTag) + case *ast.MapFieldNode: + fd, md := r.asMapDescriptors(decl, isProto3, maxTag) msgd.Field = append(msgd.Field, fd) msgd.NestedType = append(msgd.NestedType, md) - } else if decl.group != nil { - fd, md := r.asGroupDescriptors(decl.group, isProto3, maxTag) + case *ast.GroupNode: + fd, md := r.asGroupDescriptors(decl, isProto3, maxTag) msgd.Field = append(msgd.Field, fd) msgd.NestedType = append(msgd.NestedType, md) - } else if decl.oneOf != nil { + case *ast.OneOfNode: oodIndex := len(msgd.OneofDecl) - ood := &dpb.OneofDescriptorProto{Name: proto.String(decl.oneOf.name.val)} - r.putOneOfNode(ood, decl.oneOf) + ood := &dpb.OneofDescriptorProto{Name: proto.String(decl.Name.Val)} + r.putOneOfNode(ood, decl) msgd.OneofDecl = append(msgd.OneofDecl, ood) ooFields := 0 - for _, oodecl := range decl.oneOf.decls { - if oodecl.option != nil { + for _, oodecl := range decl.Decls { + switch oodecl := oodecl.(type) { + case *ast.OptionNode: if ood.Options == nil { ood.Options = &dpb.OneofOptions{} } - ood.Options.UninterpretedOption = append(ood.Options.UninterpretedOption, r.asUninterpretedOption(oodecl.option)) - } else if oodecl.field != nil { - fd := r.asFieldDescriptor(oodecl.field, maxTag, isProto3) + ood.Options.UninterpretedOption = append(ood.Options.UninterpretedOption, r.asUninterpretedOption(oodecl)) + case *ast.FieldNode: + fd := r.asFieldDescriptor(oodecl, maxTag, isProto3) fd.OneofIndex = proto.Int32(int32(oodIndex)) msgd.Field = append(msgd.Field, fd) ooFields++ - } else if oodecl.group != nil { - fd, md := r.asGroupDescriptors(oodecl.group, isProto3, maxTag) + case *ast.GroupNode: + fd, md := r.asGroupDescriptors(oodecl, isProto3, maxTag) fd.OneofIndex = proto.Int32(int32(oodIndex)) msgd.Field = append(msgd.Field, fd) msgd.NestedType = append(msgd.NestedType, md) @@ -439,56 +445,67 @@ func (r *parseResult) addMessageDecls(msgd *dpb.DescriptorProto, decls []*messag } } if ooFields == 0 { - _ = r.errs.handleErrorWithPos(decl.oneOf.start(), "oneof must contain at least one field") + _ = r.errs.handleErrorWithPos(decl.Start(), "oneof must contain at least one field") } - } else if decl.nested != nil { - msgd.NestedType = append(msgd.NestedType, r.asMessageDescriptor(decl.nested, 
isProto3)) - } else if decl.reserved != nil { - for _, n := range decl.reserved.names { - count := rsvdNames[n.val] + case *ast.MessageNode: + msgd.NestedType = append(msgd.NestedType, r.asMessageDescriptor(decl, isProto3)) + case *ast.ReservedNode: + for _, n := range decl.Names { + count := rsvdNames[n.AsString()] if count == 1 { // already seen - _ = r.errs.handleErrorWithPos(n.start(), "name %q is reserved multiple times", n.val) + _ = r.errs.handleErrorWithPos(n.Start(), "name %q is reserved multiple times", n.AsString()) } - rsvdNames[n.val] = count + 1 - msgd.ReservedName = append(msgd.ReservedName, n.val) + rsvdNames[n.AsString()] = count + 1 + msgd.ReservedName = append(msgd.ReservedName, n.AsString()) } - for _, rng := range decl.reserved.ranges { + for _, rng := range decl.Ranges { msgd.ReservedRange = append(msgd.ReservedRange, r.asMessageReservedRange(rng, maxTag)) } } } + if messageSetOpt != nil { + if len(msgd.Field) > 0 { + node := r.getFieldNode(msgd.Field[0]) + _ = r.errs.handleErrorWithPos(node.Start(), "messages with message-set wire format cannot contain non-extension fields") + } + if len(msgd.ExtensionRange) == 0 { + node := r.getOptionNode(messageSetOpt) + _ = r.errs.handleErrorWithPos(node.Start(), "messages with message-set wire format must contain at least one extension range") + } + } + // process any proto3_optional fields if isProto3 { internal.ProcessProto3OptionalFields(msgd) } } -func isMessageSetWireFormat(res *parseResult, scope string, md *dpb.DescriptorProto) (bool, error) { +func isMessageSetWireFormat(res *parseResult, scope string, md *dpb.DescriptorProto) (*dpb.UninterpretedOption, error) { uo := md.GetOptions().GetUninterpretedOption() index, err := findOption(res, scope, uo, "message_set_wire_format") if err != nil { - return false, err + return nil, err } if index == -1 { - // no such option, so default to false - return false, nil + // no such option + return nil, nil } opt := uo[index] - optNode := res.getOptionNode(opt) switch opt.GetIdentifierValue() { case "true": - return true, nil + return opt, nil case "false": - return false, nil + return nil, nil default: - return false, res.errs.handleErrorWithPos(optNode.getValue().start(), "%s: expecting bool value for message_set_wire_format option", scope) + optNode := res.getOptionNode(opt) + return nil, res.errs.handleErrorWithPos(optNode.GetValue().Start(), "%s: expecting bool value for message_set_wire_format option", scope) } } -func (r *parseResult) asMessageReservedRange(rng *rangeNode, maxTag int32) *dpb.DescriptorProto_ReservedRange { +func (r *parseResult) asMessageReservedRange(rng *ast.RangeNode, maxTag int32) *dpb.DescriptorProto_ReservedRange { start, end := getRangeBounds(r, rng, 0, maxTag) rr := &dpb.DescriptorProto_ReservedRange{ Start: proto.Int32(start), @@ -498,40 +515,41 @@ func (r *parseResult) asMessageReservedRange(rng *rangeNode, maxTag int32) *dpb. 
return rr } -func getRangeBounds(res *parseResult, rng *rangeNode, minVal, maxVal int32) (int32, int32) { +func getRangeBounds(res *parseResult, rng *ast.RangeNode, minVal, maxVal int32) (int32, int32) { checkOrder := true - start, ok := rng.startValueAsInt32(minVal, maxVal) + start, ok := rng.StartValueAsInt32(minVal, maxVal) if !ok { checkOrder = false - _ = res.errs.handleErrorWithPos(rng.startNode.start(), "range start %d is out of range: should be between %d and %d", rng.startValue(), minVal, maxVal) + _ = res.errs.handleErrorWithPos(rng.StartVal.Start(), "range start %d is out of range: should be between %d and %d", rng.StartValue(), minVal, maxVal) } - end, ok := rng.endValueAsInt32(minVal, maxVal) + end, ok := rng.EndValueAsInt32(minVal, maxVal) if !ok { checkOrder = false - if rng.endNode != nil { - _ = res.errs.handleErrorWithPos(rng.endNode.start(), "range end %d is out of range: should be between %d and %d", rng.endValue(), minVal, maxVal) + if rng.EndVal != nil { + _ = res.errs.handleErrorWithPos(rng.EndVal.Start(), "range end %d is out of range: should be between %d and %d", rng.EndValue(), minVal, maxVal) } } if checkOrder && start > end { - _ = res.errs.handleErrorWithPos(rng.rangeStart().start(), "range, %d to %d, is invalid: start must be <= end", start, end) + _ = res.errs.handleErrorWithPos(rng.RangeStart().Start(), "range, %d to %d, is invalid: start must be <= end", start, end) } return start, end } -func (r *parseResult) asServiceDescriptor(svc *serviceNode) *dpb.ServiceDescriptorProto { - sd := &dpb.ServiceDescriptorProto{Name: proto.String(svc.name.val)} +func (r *parseResult) asServiceDescriptor(svc *ast.ServiceNode) *dpb.ServiceDescriptorProto { + sd := &dpb.ServiceDescriptorProto{Name: proto.String(svc.Name.Val)} r.putServiceNode(sd, svc) - for _, decl := range svc.decls { - if decl.option != nil { + for _, decl := range svc.Decls { + switch decl := decl.(type) { + case *ast.OptionNode: if sd.Options == nil { sd.Options = &dpb.ServiceOptions{} } - sd.Options.UninterpretedOption = append(sd.Options.UninterpretedOption, r.asUninterpretedOption(decl.option)) - } else if decl.rpc != nil { - sd.Method = append(sd.Method, r.asMethodDescriptor(decl.rpc)) + sd.Options.UninterpretedOption = append(sd.Options.UninterpretedOption, r.asUninterpretedOption(decl)) + case *ast.RPCNode: + sd.Method = append(sd.Method, r.asMethodDescriptor(decl)) } } return sd diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/errors.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/errors.go index 79e2adc29bf..77ead4d066b 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/errors.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/errors.go @@ -16,6 +16,9 @@ var ErrInvalidSource = errors.New("parse failed: invalid proto source") // indicates the file that had no syntax statement. var ErrNoSyntax = errors.New("no syntax specified; defaulting to proto2 syntax") +// ErrLookupImportAndProtoSet is the error returned if both LookupImport and LookupImportProto are set. +var ErrLookupImportAndProtoSet = errors.New("both LookupImport and LookupImportProto set") + // ErrorReporter is responsible for reporting the given error. If the reporter // returns a non-nil error, parsing/linking will abort with that error. 
If the // reporter returns nil, parsing will continue, allowing the parser to try to diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go index ce3d2f6d8a6..e8f2cd0af93 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/lexer.go @@ -9,10 +9,13 @@ import ( "strconv" "strings" "unicode/utf8" + + "github.com/jhump/protoreflect/desc/protoparse/ast" ) type runeReader struct { rr *bufio.Reader + marked []rune unread []rune err error } @@ -24,19 +27,40 @@ func (rr *runeReader) readRune() (r rune, size int, err error) { if len(rr.unread) > 0 { r := rr.unread[len(rr.unread)-1] rr.unread = rr.unread[:len(rr.unread)-1] + if rr.marked != nil { + rr.marked = append(rr.marked, r) + } return r, utf8.RuneLen(r), nil } r, sz, err := rr.rr.ReadRune() if err != nil { rr.err = err + } else if rr.marked != nil { + rr.marked = append(rr.marked, r) } return r, sz, err } func (rr *runeReader) unreadRune(r rune) { + if rr.marked != nil { + if rr.marked[len(rr.marked)-1] != r { + panic("unread rune is not the same as last marked rune!") + } + rr.marked = rr.marked[:len(rr.marked)-1] + } rr.unread = append(rr.unread, r) } +func (rr *runeReader) startMark(initial rune) { + rr.marked = []rune{initial} +} + +func (rr *runeReader) endMark() string { + m := string(rr.marked) + rr.marked = rr.marked[:0] + return m +} + func lexError(l protoLexer, pos *SourcePos, err string) { pl := l.(*protoLex) _ = pl.errs.handleErrorWithPos(pos, err) @@ -46,23 +70,35 @@ type protoLex struct { filename string input *runeReader errs *errorHandler - res *fileNode + res *ast.FileNode lineNo int colNo int offset int - prevSym terminalNode + prevSym ast.TerminalNode + eof ast.TerminalNode prevLineNo int prevColNo int prevOffset int - comments []comment + comments []ast.Comment + ws []rune } +var utf8Bom = []byte{0xEF, 0xBB, 0xBF} + func newLexer(in io.Reader, filename string, errs *errorHandler) *protoLex { + br := bufio.NewReader(in) + + // if file has UTF8 byte order marker preface, consume it + marker, err := br.Peek(3) + if err == nil && bytes.Equal(marker, utf8Bom) { + _, _ = br.Discard(3) + } + return &protoLex{ - input: &runeReader{rr: bufio.NewReader(in)}, + input: &runeReader{rr: br}, filename: filename, errs: errs, } @@ -150,7 +186,7 @@ func (l *protoLex) prev() *SourcePos { Col: 1, } } - return l.prevSym.start() + return l.prevSym.Start() } func (l *protoLex) Lex(lval *protoSymType) int { @@ -164,6 +200,8 @@ func (l *protoLex) Lex(lval *protoSymType) int { l.prevColNo = l.colNo l.prevOffset = l.offset l.comments = nil + l.ws = nil + l.input.endMark() // reset, just in case for { c, n, err := l.input.readRune() @@ -171,7 +209,8 @@ func (l *protoLex) Lex(lval *protoSymType) int { // we're not actually returning a rune, but this will associate // accumulated comments as a trailing comment on last symbol // (if appropriate) - l.setRune(lval) + l.setRune(lval, 0) + l.eof = lval.b return 0 } else if err != nil { // we don't call setError because we don't want it wrapped @@ -188,14 +227,16 @@ func (l *protoLex) Lex(lval *protoSymType) int { l.offset += n l.adjustPos(c) if strings.ContainsRune("\n\r\t ", c) { + l.ws = append(l.ws, c) continue } + l.input.startMark(c) if c == '.' 
{ // decimal literals could start with a dot cn, _, err := l.input.readRune() if err != nil { - l.setDot(lval) + l.setRune(lval, c) return int(c) } if cn >= '0' && cn <= '9' { @@ -211,7 +252,7 @@ func (l *protoLex) Lex(lval *protoSymType) int { return _FLOAT_LIT } l.input.unreadRune(cn) - l.setDot(lval) + l.setRune(lval, c) return int(c) } @@ -311,63 +352,77 @@ func (l *protoLex) Lex(lval *protoSymType) int { // comment cn, _, err := l.input.readRune() if err != nil { - l.setRune(lval) + l.setRune(lval, '/') return int(c) } if cn == '/' { l.adjustPos(cn) - hitNewline, txt := l.skipToEndOfLineComment() - commentPos := l.posRange() - commentPos.end.Col++ + hitNewline := l.skipToEndOfLineComment() + comment := l.newComment() + comment.PosRange.End.Col++ if hitNewline { // we don't do this inside of skipToEndOfLineComment // because we want to know the length of previous // line for calculation above l.adjustPos('\n') } - l.comments = append(l.comments, comment{posRange: commentPos, text: txt}) + l.comments = append(l.comments, comment) continue } if cn == '*' { l.adjustPos(cn) - if txt, ok := l.skipToEndOfBlockComment(); !ok { + if ok := l.skipToEndOfBlockComment(); !ok { l.setError(lval, errors.New("block comment never terminates, unexpected EOF")) return _ERROR } else { - l.comments = append(l.comments, comment{posRange: l.posRange(), text: txt}) + l.comments = append(l.comments, l.newComment()) } continue } l.input.unreadRune(cn) } - l.setRune(lval) + l.setRune(lval, c) return int(c) } } -func (l *protoLex) posRange() posRange { - return posRange{ - start: SourcePos{ +func (l *protoLex) posRange() ast.PosRange { + return ast.PosRange{ + Start: SourcePos{ Filename: l.filename, Offset: l.prevOffset, Line: l.prevLineNo + 1, Col: l.prevColNo + 1, }, - end: l.cur(), + End: l.cur(), + } +} + +func (l *protoLex) newComment() ast.Comment { + ws := string(l.ws) + l.ws = l.ws[:0] + return ast.Comment{ + PosRange: l.posRange(), + LeadingWhitespace: ws, + Text: l.input.endMark(), } } -func (l *protoLex) newBasicNode() basicNode { - return basicNode{ - posRange: l.posRange(), - leading: l.comments, +func (l *protoLex) newTokenInfo() ast.TokenInfo { + ws := string(l.ws) + l.ws = nil + return ast.TokenInfo{ + PosRange: l.posRange(), + LeadingComments: l.comments, + LeadingWhitespace: ws, + RawText: l.input.endMark(), } } -func (l *protoLex) setPrev(n terminalNode, isDot bool) { - nStart := n.start().Line - if _, ok := n.(*basicNode); ok { +func (l *protoLex) setPrev(n ast.TerminalNode, isDot bool) { + nStart := n.Start().Line + if _, ok := n.(*ast.RuneNode); ok { // This is really gross, but there are many cases where we don't want // to attribute comments to punctuation (like commas, equals, semicolons) // and would instead prefer to attribute comments to a more meaningful @@ -382,33 +437,33 @@ func (l *protoLex) setPrev(n terminalNode, isDot bool) { nStart += 2 } } - if l.prevSym != nil && len(n.leadingComments()) > 0 && l.prevSym.end().Line < nStart { + if l.prevSym != nil && len(n.LeadingComments()) > 0 && l.prevSym.End().Line < nStart { // we may need to re-attribute the first comment to // instead be previous node's trailing comment - prevEnd := l.prevSym.end().Line - comments := n.leadingComments() + prevEnd := l.prevSym.End().Line + comments := n.LeadingComments() c := comments[0] - commentStart := c.start.Line + commentStart := c.Start.Line if commentStart == prevEnd { // comment is on same line as previous symbol - n.popLeadingComment() - l.prevSym.pushTrailingComment(c) + n.PopLeadingComment() 
+ l.prevSym.PushTrailingComment(c) } else if commentStart == prevEnd+1 { // comment is right after previous symbol; see if it is detached // and if so re-attribute - singleLineStyle := strings.HasPrefix(c.text, "//") - line := c.end.Line + singleLineStyle := strings.HasPrefix(c.Text, "//") + line := c.End.Line groupEnd := -1 for i := 1; i < len(comments); i++ { c := comments[i] newGroup := false - if !singleLineStyle || c.start.Line > line+1 { + if !singleLineStyle || c.Start.Line > line+1 { // we've found a gap between comments, which means the // previous comments were detached newGroup = true } else { - line = c.end.Line - singleLineStyle = strings.HasPrefix(comments[i].text, "//") + line = c.End.Line + singleLineStyle = strings.HasPrefix(comments[i].Text, "//") if !singleLineStyle { // we've found a switch from // comments to /* // consider that a new group which means the @@ -428,13 +483,13 @@ func (l *protoLex) setPrev(n terminalNode, isDot bool) { // detached from current symbol c1 := comments[0] c2 := comments[len(comments)-1] - if c1.start.Line <= prevEnd+1 && c2.end.Line < nStart-1 { + if c1.Start.Line <= prevEnd+1 && c2.End.Line < nStart-1 { groupEnd = len(comments) } } for i := 0; i < groupEnd; i++ { - l.prevSym.pushTrailingComment(n.popLeadingComment()) + l.prevSym.PushTrailingComment(n.PopLeadingComment()) } } } @@ -443,35 +498,28 @@ func (l *protoLex) setPrev(n terminalNode, isDot bool) { } func (l *protoLex) setString(lval *protoSymType, val string) { - lval.s = &stringLiteralNode{basicNode: l.newBasicNode(), val: val} + lval.s = ast.NewStringLiteralNode(val, l.newTokenInfo()) l.setPrev(lval.s, false) } func (l *protoLex) setIdent(lval *protoSymType, val string) { - lval.id = &identNode{basicNode: l.newBasicNode(), val: val} + lval.id = ast.NewIdentNode(val, l.newTokenInfo()) l.setPrev(lval.id, false) } func (l *protoLex) setInt(lval *protoSymType, val uint64) { - lval.i = &intLiteralNode{basicNode: l.newBasicNode(), val: val} + lval.i = ast.NewUintLiteralNode(val, l.newTokenInfo()) l.setPrev(lval.i, false) } func (l *protoLex) setFloat(lval *protoSymType, val float64) { - lval.f = &floatLiteralNode{basicNode: l.newBasicNode(), val: val} + lval.f = ast.NewFloatLiteralNode(val, l.newTokenInfo()) l.setPrev(lval.f, false) } -func (l *protoLex) setRune(lval *protoSymType) { - b := l.newBasicNode() - lval.b = &b - l.setPrev(lval.b, false) -} - -func (l *protoLex) setDot(lval *protoSymType) { - b := l.newBasicNode() - lval.b = &b - l.setPrev(lval.b, true) +func (l *protoLex) setRune(lval *protoSymType, val rune) { + lval.b = ast.NewRuneNode(val, l.newTokenInfo()) + l.setPrev(lval.b, val == '.') } func (l *protoLex) setError(lval *protoSymType, err error) { @@ -724,39 +772,34 @@ func (l *protoLex) readStringLiteral(quote rune) (string, error) { return buf.String(), nil } -func (l *protoLex) skipToEndOfLineComment() (bool, string) { - txt := []rune{'/', '/'} +func (l *protoLex) skipToEndOfLineComment() bool { for { c, _, err := l.input.readRune() if err != nil { - return false, string(txt) + return false } if c == '\n' { - return true, string(append(txt, '\n')) + return true } l.adjustPos(c) - txt = append(txt, c) } } -func (l *protoLex) skipToEndOfBlockComment() (string, bool) { - txt := []rune{'/', '*'} +func (l *protoLex) skipToEndOfBlockComment() bool { for { c, _, err := l.input.readRune() if err != nil { - return "", false + return false } l.adjustPos(c) - txt = append(txt, c) if c == '*' { c, _, err := l.input.readRune() if err != nil { - return "", false + return false } if 
c == '/' { l.adjustPos(c) - txt = append(txt, c) - return string(txt), true + return true } l.input.unreadRune(c) } diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go index 9d49e565b07..872b42fa113 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/linker.go @@ -11,6 +11,7 @@ import ( "github.com/jhump/protoreflect/desc" "github.com/jhump/protoreflect/desc/internal" + "github.com/jhump/protoreflect/desc/protoparse/ast" ) type linker struct { @@ -68,7 +69,7 @@ func (l *linker) linkFiles() (map[string]*desc.FileDescriptor, error) { } // we should now have any message_set_wire_format options parsed // and can do further validation on tag ranges - if err := checkExtensionTagsInFile(fd, r); err != nil { + if err := checkExtensionsInFile(fd, r); err != nil { return nil, err } } @@ -141,7 +142,7 @@ func (l *linker) createDescriptorPool() error { desc1, desc2 = desc2, desc1 } node := l.files[file2].nodes[desc2] - if err := l.errs.handleErrorWithPos(node.start(), "duplicate symbol %s: already defined as %s in %q", k, descriptorType(desc1), file1); err != nil { + if err := l.errs.handleErrorWithPos(node.Start(), "duplicate symbol %s: already defined as %s in %q", k, descriptorType(desc1), file1); err != nil { return err } } @@ -217,7 +218,7 @@ func addServiceToPool(r *parseResult, pool map[string]proto.Message, errs *error func addToPool(r *parseResult, pool map[string]proto.Message, errs *errorHandler, fqn string, dsc proto.Message) error { if d, ok := pool[fqn]; ok { node := r.nodes[dsc] - if err := errs.handleErrorWithPos(node.start(), "duplicate symbol %s: already defined as %s", fqn, descriptorType(d)); err != nil { + if err := errs.handleErrorWithPos(node.Start(), "duplicate symbol %s: already defined as %s", fqn, descriptorType(d)); err != nil { return err } } @@ -362,12 +363,12 @@ func (l *linker) resolveFieldTypes(r *parseResult, fd *dpb.FileDescriptorProto, elemType = "extension" fqn, dsc, _ := l.resolve(fd, fld.GetExtendee(), isMessage, scopes) if dsc == nil { - return l.errs.handleErrorWithPos(node.fieldExtendee().start(), "unknown extendee type %s", fld.GetExtendee()) + return l.errs.handleErrorWithPos(node.FieldExtendee().Start(), "unknown extendee type %s", fld.GetExtendee()) } extd, ok := dsc.(*dpb.DescriptorProto) if !ok { otherType := descriptorType(dsc) - return l.errs.handleErrorWithPos(node.fieldExtendee().start(), "extendee is invalid: %s is a %s, not a message", fqn, otherType) + return l.errs.handleErrorWithPos(node.FieldExtendee().Start(), "extendee is invalid: %s is a %s, not a message", fqn, otherType) } fld.Extendee = proto.String("." 
+ fqn) // make sure the tag number is in range @@ -380,7 +381,7 @@ func (l *linker) resolveFieldTypes(r *parseResult, fd *dpb.FileDescriptorProto, } } if !found { - if err := l.errs.handleErrorWithPos(node.fieldTag().start(), "%s: tag %d is not in valid range for extended type %s", scope, tag, fqn); err != nil { + if err := l.errs.handleErrorWithPos(node.FieldTag().Start(), "%s: tag %d is not in valid range for extended type %s", scope, tag, fqn); err != nil { return err } } else { @@ -391,7 +392,7 @@ func (l *linker) resolveFieldTypes(r *parseResult, fd *dpb.FileDescriptorProto, l.extensions[fqn] = usedExtTags } if other := usedExtTags[fld.GetNumber()]; other != "" { - if err := l.errs.handleErrorWithPos(node.fieldTag().start(), "%s: duplicate extension: %s and %s are both using tag %d", scope, other, thisName, fld.GetNumber()); err != nil { + if err := l.errs.handleErrorWithPos(node.FieldTag().Start(), "%s: duplicate extension: %s and %s are both using tag %d", scope, other, thisName, fld.GetNumber()); err != nil { return err } } else { @@ -413,7 +414,7 @@ func (l *linker) resolveFieldTypes(r *parseResult, fd *dpb.FileDescriptorProto, fqn, dsc, proto3 := l.resolve(fd, fld.GetTypeName(), isType, scopes) if dsc == nil { - return l.errs.handleErrorWithPos(node.fieldType().start(), "%s: unknown type %s", scope, fld.GetTypeName()) + return l.errs.handleErrorWithPos(node.FieldType().Start(), "%s: unknown type %s", scope, fld.GetTypeName()) } switch dsc := dsc.(type) { case *dpb.DescriptorProto: @@ -425,14 +426,14 @@ func (l *linker) resolveFieldTypes(r *parseResult, fd *dpb.FileDescriptorProto, case *dpb.EnumDescriptorProto: if fld.GetExtendee() == "" && isProto3(fd) && !proto3 { // fields in a proto3 message cannot refer to proto2 enums - return l.errs.handleErrorWithPos(node.fieldType().start(), "%s: cannot use proto2 enum %s in a proto3 message", scope, fld.GetTypeName()) + return l.errs.handleErrorWithPos(node.FieldType().Start(), "%s: cannot use proto2 enum %s in a proto3 message", scope, fld.GetTypeName()) } fld.TypeName = proto.String("." 
+ fqn) // the type was tentatively unset, but now we know it's actually an enum fld.Type = dpb.FieldDescriptorProto_TYPE_ENUM.Enum() default: otherType := descriptorType(dsc) - return l.errs.handleErrorWithPos(node.fieldType().start(), "%s: invalid type: %s is a %s, not a message or enum", scope, fqn, otherType) + return l.errs.handleErrorWithPos(node.FieldType().Start(), "%s: invalid type: %s is a %s, not a message or enum", scope, fqn, otherType) } return nil } @@ -455,12 +456,12 @@ func (l *linker) resolveServiceTypes(r *parseResult, fd *dpb.FileDescriptorProto node := r.getMethodNode(mtd) fqn, dsc, _ := l.resolve(fd, mtd.GetInputType(), isMessage, scopes) if dsc == nil { - if err := l.errs.handleErrorWithPos(node.getInputType().start(), "%s: unknown request type %s", scope, mtd.GetInputType()); err != nil { + if err := l.errs.handleErrorWithPos(node.GetInputType().Start(), "%s: unknown request type %s", scope, mtd.GetInputType()); err != nil { return err } } else if _, ok := dsc.(*dpb.DescriptorProto); !ok { otherType := descriptorType(dsc) - if err := l.errs.handleErrorWithPos(node.getInputType().start(), "%s: invalid request type: %s is a %s, not a message", scope, fqn, otherType); err != nil { + if err := l.errs.handleErrorWithPos(node.GetInputType().Start(), "%s: invalid request type: %s is a %s, not a message", scope, fqn, otherType); err != nil { return err } } else { @@ -469,12 +470,12 @@ func (l *linker) resolveServiceTypes(r *parseResult, fd *dpb.FileDescriptorProto fqn, dsc, _ = l.resolve(fd, mtd.GetOutputType(), isMessage, scopes) if dsc == nil { - if err := l.errs.handleErrorWithPos(node.getOutputType().start(), "%s: unknown response type %s", scope, mtd.GetOutputType()); err != nil { + if err := l.errs.handleErrorWithPos(node.GetOutputType().Start(), "%s: unknown response type %s", scope, mtd.GetOutputType()); err != nil { return err } } else if _, ok := dsc.(*dpb.DescriptorProto); !ok { otherType := descriptorType(dsc) - if err := l.errs.handleErrorWithPos(node.getOutputType().start(), "%s: invalid response type: %s is a %s, not a message", scope, fqn, otherType); err != nil { + if err := l.errs.handleErrorWithPos(node.GetOutputType().Start(), "%s: invalid response type: %s is a %s, not a message", scope, fqn, otherType); err != nil { return err } } else { @@ -496,19 +497,19 @@ opts: node := r.getOptionNamePartNode(nm) fqn, dsc, _ := l.resolve(fd, nm.GetNamePart(), isField, scopes) if dsc == nil { - if err := l.errs.handleErrorWithPos(node.start(), "%sunknown extension %s", scope, nm.GetNamePart()); err != nil { + if err := l.errs.handleErrorWithPos(node.Start(), "%sunknown extension %s", scope, nm.GetNamePart()); err != nil { return err } continue opts } if ext, ok := dsc.(*dpb.FieldDescriptorProto); !ok { otherType := descriptorType(dsc) - if err := l.errs.handleErrorWithPos(node.start(), "%sinvalid extension: %s is a %s, not an extension", scope, nm.GetNamePart(), otherType); err != nil { + if err := l.errs.handleErrorWithPos(node.Start(), "%sinvalid extension: %s is a %s, not an extension", scope, nm.GetNamePart(), otherType); err != nil { return err } continue opts } else if ext.GetExtendee() == "" { - if err := l.errs.handleErrorWithPos(node.start(), "%sinvalid extension: %s is a field but not an extension", scope, nm.GetNamePart()); err != nil { + if err := l.errs.handleErrorWithPos(node.Start(), "%sinvalid extension: %s is a field but not an extension", scope, nm.GetNamePart()); err != nil { return err } continue opts @@ -703,19 +704,21 @@ func (l *linker) 
linkFile(name string, rootImportLoc *SourcePos, seen []string, if rootImportLoc == nil { // try to find a source location for this "root" import decl := r.getFileNode(r.fd) - fnode, ok := decl.(*fileNode) + fnode, ok := decl.(*ast.FileNode) if ok { - for _, dep := range fnode.imports { - ldep, err := l.linkFile(dep.name.val, dep.name.start(), seen, linked) - if err != nil { - return nil, err + for _, decl := range fnode.Decls { + if dep, ok := decl.(*ast.ImportNode); ok { + ldep, err := l.linkFile(dep.Name.AsString(), dep.Name.Start(), seen, linked) + if err != nil { + return nil, err + } + deps = append(deps, ldep) } - deps = append(deps, ldep) } } else { // no AST? just use the descriptor for _, dep := range r.fd.Dependency { - ldep, err := l.linkFile(dep, decl.start(), seen, linked) + ldep, err := l.linkFile(dep, decl.Start(), seen, linked) if err != nil { return nil, err } diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go index 403deb99373..23b06d875c8 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/options.go @@ -10,6 +10,7 @@ import ( "github.com/jhump/protoreflect/desc" "github.com/jhump/protoreflect/desc/internal" + "github.com/jhump/protoreflect/desc/protoparse/ast" "github.com/jhump/protoreflect/dynamic" ) @@ -795,12 +796,12 @@ func interpretFieldOptions(r *parseResult, fld fldDescriptorish) error { optNode := r.getOptionNode(opt) // attribute source code info - if on, ok := optNode.(*optionNode); ok { + if on, ok := optNode.(*ast.OptionNode); ok { r.interpretedOptions[on] = []int32{-1, internal.Field_jsonNameTag} } uo = removeOption(uo, index) if opt.StringValue == nil { - if err := r.errs.handleErrorWithPos(optNode.getValue().start(), "%s: expecting string value for json_name option", scope); err != nil { + if err := r.errs.handleErrorWithPos(optNode.GetValue().Start(), "%s: expecting string value for json_name option", scope); err != nil { return err } } else { @@ -814,7 +815,7 @@ func interpretFieldOptions(r *parseResult, fld fldDescriptorish) error { } else if index >= 0 { // attribute source code info optNode := r.getOptionNode(uo[index]) - if on, ok := optNode.(*optionNode); ok { + if on, ok := optNode.(*ast.OptionNode); ok { r.interpretedOptions[on] = []int32{-1, internal.Field_defaultTag} } uo = removeOption(uo, index) @@ -841,14 +842,14 @@ func processDefaultOption(res *parseResult, scope string, fld fldDescriptorish, optNode := res.getOptionNode(opt) fdp := fld.AsFieldDescriptorProto() if fdp.GetLabel() == dpb.FieldDescriptorProto_LABEL_REPEATED { - return -1, res.errs.handleErrorWithPos(optNode.getName().start(), "%s: default value cannot be set because field is repeated", scope) + return -1, res.errs.handleErrorWithPos(optNode.GetName().Start(), "%s: default value cannot be set because field is repeated", scope) } if fdp.GetType() == dpb.FieldDescriptorProto_TYPE_GROUP || fdp.GetType() == dpb.FieldDescriptorProto_TYPE_MESSAGE { - return -1, res.errs.handleErrorWithPos(optNode.getName().start(), "%s: default value cannot be set because field is a message", scope) + return -1, res.errs.handleErrorWithPos(optNode.GetName().Start(), "%s: default value cannot be set because field is a message", scope) } - val := optNode.getValue() - if _, ok := val.(*aggregateLiteralNode); ok { - return -1, res.errs.handleErrorWithPos(val.start(), "%s: default value cannot be an aggregate", scope) + val := 
optNode.GetValue() + if _, ok := val.(*ast.MessageLiteralNode); ok { + return -1, res.errs.handleErrorWithPos(val.Start(), "%s: default value cannot be a message", scope) } mc := &messageContext{ res: res, @@ -936,7 +937,7 @@ func interpretOptions(res *parseResult, element descriptorish, opts proto.Messag return uninterpreted, nil } node := res.nodes[element.AsProto()] - return nil, res.errs.handleError(ErrorWithSourcePos{Pos: node.start(), Underlying: err}) + return nil, res.errs.handleError(ErrorWithSourcePos{Pos: node.Start(), Underlying: err}) } mc := &messageContext{res: res, file: element.GetFile(), elementName: element.GetFullyQualifiedName(), elementType: descriptorType(element.AsProto())} @@ -949,7 +950,7 @@ func interpretOptions(res *parseResult, element descriptorish, opts proto.Messag continue } // uninterpreted_option might be found reflectively, but is not actually valid for use - if err := res.errs.handleErrorWithPos(node.getName().start(), "%vinvalid option 'uninterpreted_option'", mc); err != nil { + if err := res.errs.handleErrorWithPos(node.GetName().Start(), "%vinvalid option 'uninterpreted_option'", mc); err != nil { return nil, err } } @@ -962,7 +963,7 @@ func interpretOptions(res *parseResult, element descriptorish, opts proto.Messag } return nil, err } - if optn, ok := node.(*optionNode); ok { + if optn, ok := node.(*ast.OptionNode); ok { res.interpretedOptions[optn] = path } } @@ -987,7 +988,7 @@ func interpretOptions(res *parseResult, element descriptorish, opts proto.Messag if err := dm.ValidateRecursive(); err != nil { node := res.nodes[element.AsProto()] - if err := res.errs.handleErrorWithPos(node.start(), "error in %s options: %v", descriptorType(element.AsProto()), err); err != nil { + if err := res.errs.handleErrorWithPos(node.Start(), "error in %s options: %v", descriptorType(element.AsProto()), err); err != nil { return nil, err } } @@ -995,7 +996,7 @@ func interpretOptions(res *parseResult, element descriptorish, opts proto.Messag // nw try to convert into the passed in message and fail if not successful if err := dm.ConvertToDeterministic(opts); err != nil { node := res.nodes[element.AsProto()] - return nil, res.errs.handleError(ErrorWithSourcePos{Pos: node.start(), Underlying: err}) + return nil, res.errs.handleError(ErrorWithSourcePos{Pos: node.Start(), Underlying: err}) } return nil, nil @@ -1012,19 +1013,19 @@ func interpretField(res *parseResult, mc *messageContext, element descriptorish, } fld = findExtension(element.GetFile(), extName, false, map[fileDescriptorish]struct{}{}) if fld == nil { - return nil, res.errs.handleErrorWithPos(node.start(), + return nil, res.errs.handleErrorWithPos(node.Start(), "%vunrecognized extension %s of %s", mc, extName, dm.GetMessageDescriptor().GetFullyQualifiedName()) } if fld.GetOwner().GetFullyQualifiedName() != dm.GetMessageDescriptor().GetFullyQualifiedName() { - return nil, res.errs.handleErrorWithPos(node.start(), + return nil, res.errs.handleErrorWithPos(node.Start(), "%vextension %s should extend %s but instead extends %s", mc, extName, dm.GetMessageDescriptor().GetFullyQualifiedName(), fld.GetOwner().GetFullyQualifiedName()) } } else { fld = dm.GetMessageDescriptor().FindFieldByName(nm.GetNamePart()) if fld == nil { - return nil, res.errs.handleErrorWithPos(node.start(), + return nil, res.errs.handleErrorWithPos(node.Start(), "%vfield %s of %s does not exist", mc, nm.GetNamePart(), dm.GetMessageDescriptor().GetFullyQualifiedName()) } @@ -1036,12 +1037,12 @@ func interpretField(res *parseResult, mc 
*messageContext, element descriptorish, nextnm := opt.GetName()[nameIndex+1] nextnode := res.getOptionNamePartNode(nextnm) if fld.GetType() != dpb.FieldDescriptorProto_TYPE_MESSAGE { - return nil, res.errs.handleErrorWithPos(nextnode.start(), + return nil, res.errs.handleErrorWithPos(nextnode.Start(), "%vcannot set field %s because %s is not a message", mc, nextnm.GetNamePart(), nm.GetNamePart()) } if fld.IsRepeated() { - return nil, res.errs.handleErrorWithPos(nextnode.start(), + return nil, res.errs.handleErrorWithPos(nextnode.Start(), "%vcannot set field %s because %s is repeated (must use an aggregate)", mc, nextnm.GetNamePart(), nm.GetNamePart()) } @@ -1056,14 +1057,14 @@ func interpretField(res *parseResult, mc *messageContext, element descriptorish, err = dm.TrySetField(fld, fdm) } if err != nil { - return nil, res.errs.handleError(ErrorWithSourcePos{Pos: node.start(), Underlying: err}) + return nil, res.errs.handleError(ErrorWithSourcePos{Pos: node.Start(), Underlying: err}) } // recurse to set next part of name return interpretField(res, mc, element, fdm, opt, nameIndex+1, path) } optNode := res.getOptionNode(opt) - if err := setOptionField(res, mc, dm, fld, node, optNode.getValue()); err != nil { + if err := setOptionField(res, mc, dm, fld, node, optNode.GetValue()); err != nil { return nil, res.errs.handleError(err) } if fld.IsRepeated() { @@ -1105,12 +1106,12 @@ func findExtension(fd fileDescriptorish, name string, public bool, checked map[f return nil } -func setOptionField(res *parseResult, mc *messageContext, dm *dynamic.Message, fld *desc.FieldDescriptor, name node, val valueNode) error { - v := val.value() - if sl, ok := v.([]valueNode); ok { +func setOptionField(res *parseResult, mc *messageContext, dm *dynamic.Message, fld *desc.FieldDescriptor, name ast.Node, val ast.ValueNode) error { + v := val.Value() + if sl, ok := v.([]ast.ValueNode); ok { // handle slices a little differently than the others if !fld.IsRepeated() { - return errorWithPos(val.start(), "%vvalue is an array but field is not repeated", mc) + return errorWithPos(val.Start(), "%vvalue is an array but field is not repeated", mc) } origPath := mc.optAggPath defer func() { @@ -1121,7 +1122,7 @@ func setOptionField(res *parseResult, mc *messageContext, dm *dynamic.Message, f if v, err := fieldValue(res, mc, richFldDescriptorish{FieldDescriptor: fld}, item, false); err != nil { return err } else if err = dm.TryAddRepeatedField(fld, v); err != nil { - return errorWithPos(val.start(), "%verror setting value: %s", mc, err) + return errorWithPos(val.Start(), "%verror setting value: %s", mc, err) } } return nil @@ -1135,12 +1136,12 @@ func setOptionField(res *parseResult, mc *messageContext, dm *dynamic.Message, f err = dm.TryAddRepeatedField(fld, v) } else { if dm.HasField(fld) { - return errorWithPos(name.start(), "%vnon-repeated option field %s already set", mc, fieldName(fld)) + return errorWithPos(name.Start(), "%vnon-repeated option field %s already set", mc, fieldName(fld)) } err = dm.TrySetField(fld, v) } if err != nil { - return errorWithPos(val.start(), "%verror setting value: %s", mc, err) + return errorWithPos(val.Start(), "%verror setting value: %s", mc, err) } return nil @@ -1157,7 +1158,7 @@ func findOption(res *parseResult, scope string, opts []*dpb.UninterpretedOption, } if found >= 0 { optNode := res.getOptionNode(opt) - return -1, res.errs.handleErrorWithPos(optNode.getName().start(), "%s: option %s cannot be defined more than once", scope, name) + return -1, 
res.errs.handleErrorWithPos(optNode.GetName().Start(), "%s: option %s cannot be defined more than once", scope, name) } found = i } @@ -1237,7 +1238,7 @@ func fieldName(fld *desc.FieldDescriptor) string { func valueKind(val interface{}) string { switch val := val.(type) { - case identifier: + case ast.Identifier: return "identifier" case bool: return "bool" @@ -1252,22 +1253,24 @@ func valueKind(val interface{}) string { return "double" case string, []byte: return "string" - case []*aggregateEntryNode: + case []*ast.MessageFieldNode: return "message" + case []ast.ValueNode: + return "array" default: return fmt.Sprintf("%T", val) } } -func fieldValue(res *parseResult, mc *messageContext, fld fldDescriptorish, val valueNode, enumAsString bool) (interface{}, error) { - v := val.value() +func fieldValue(res *parseResult, mc *messageContext, fld fldDescriptorish, val ast.ValueNode, enumAsString bool) (interface{}, error) { + v := val.Value() t := fld.AsFieldDescriptorProto().GetType() switch t { case dpb.FieldDescriptorProto_TYPE_ENUM: - if id, ok := v.(identifier); ok { + if id, ok := v.(ast.Identifier); ok { ev := fld.GetEnumType().FindValueByName(string(id)) if ev == nil { - return nil, errorWithPos(val.start(), "%venum %s has no value named %s", mc, fld.GetEnumType().GetFullyQualifiedName(), id) + return nil, errorWithPos(val.Start(), "%venum %s has no value named %s", mc, fld.GetEnumType().GetFullyQualifiedName(), id) } if enumAsString { return ev.GetName(), nil @@ -1275,9 +1278,9 @@ func fieldValue(res *parseResult, mc *messageContext, fld fldDescriptorish, val return ev.GetNumber(), nil } } - return nil, errorWithPos(val.start(), "%vexpecting enum, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting enum, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_MESSAGE, dpb.FieldDescriptorProto_TYPE_GROUP: - if aggs, ok := v.([]*aggregateEntryNode); ok { + if aggs, ok := v.([]*ast.MessageFieldNode); ok { fmd := fld.GetMessageType() fdm := dynamic.NewMessage(fmd) origPath := mc.optAggPath @@ -1286,13 +1289,13 @@ func fieldValue(res *parseResult, mc *messageContext, fld fldDescriptorish, val }() for _, a := range aggs { if origPath == "" { - mc.optAggPath = a.name.value() + mc.optAggPath = a.Name.Value() } else { - mc.optAggPath = origPath + "." + a.name.value() + mc.optAggPath = origPath + "." 
+ a.Name.Value() } var ffld *desc.FieldDescriptor - if a.name.isExtension { - n := a.name.name.val + if a.Name.IsExtension() { + n := string(a.Name.Name.AsIdentifier()) ffld = findExtension(mc.file, n, false, map[fileDescriptorish]struct{}{}) if ffld == nil { // may need to qualify with package name @@ -1302,83 +1305,83 @@ func fieldValue(res *parseResult, mc *messageContext, fld fldDescriptorish, val } } } else { - ffld = fmd.FindFieldByName(a.name.value()) + ffld = fmd.FindFieldByName(a.Name.Value()) } if ffld == nil { - return nil, errorWithPos(val.start(), "%vfield %s not found", mc, a.name.name.val) + return nil, errorWithPos(val.Start(), "%vfield %s not found", mc, string(a.Name.Name.AsIdentifier())) } - if err := setOptionField(res, mc, fdm, ffld, a.name, a.val); err != nil { + if err := setOptionField(res, mc, fdm, ffld, a.Name, a.Val); err != nil { return nil, err } } return fdm, nil } - return nil, errorWithPos(val.start(), "%vexpecting message, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting message, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_BOOL: if b, ok := v.(bool); ok { return b, nil } - return nil, errorWithPos(val.start(), "%vexpecting bool, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting bool, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_BYTES: if str, ok := v.(string); ok { return []byte(str), nil } - return nil, errorWithPos(val.start(), "%vexpecting bytes, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting bytes, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_STRING: if str, ok := v.(string); ok { return str, nil } - return nil, errorWithPos(val.start(), "%vexpecting string, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting string, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_INT32, dpb.FieldDescriptorProto_TYPE_SINT32, dpb.FieldDescriptorProto_TYPE_SFIXED32: if i, ok := v.(int64); ok { if i > math.MaxInt32 || i < math.MinInt32 { - return nil, errorWithPos(val.start(), "%vvalue %d is out of range for int32", mc, i) + return nil, errorWithPos(val.Start(), "%vvalue %d is out of range for int32", mc, i) } return int32(i), nil } if ui, ok := v.(uint64); ok { if ui > math.MaxInt32 { - return nil, errorWithPos(val.start(), "%vvalue %d is out of range for int32", mc, ui) + return nil, errorWithPos(val.Start(), "%vvalue %d is out of range for int32", mc, ui) } return int32(ui), nil } - return nil, errorWithPos(val.start(), "%vexpecting int32, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting int32, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_UINT32, dpb.FieldDescriptorProto_TYPE_FIXED32: if i, ok := v.(int64); ok { if i > math.MaxUint32 || i < 0 { - return nil, errorWithPos(val.start(), "%vvalue %d is out of range for uint32", mc, i) + return nil, errorWithPos(val.Start(), "%vvalue %d is out of range for uint32", mc, i) } return uint32(i), nil } if ui, ok := v.(uint64); ok { if ui > math.MaxUint32 { - return nil, errorWithPos(val.start(), "%vvalue %d is out of range for uint32", mc, ui) + return nil, errorWithPos(val.Start(), "%vvalue %d is out of range for uint32", mc, ui) } return uint32(ui), nil } - return nil, errorWithPos(val.start(), "%vexpecting uint32, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting uint32, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_INT64, 
dpb.FieldDescriptorProto_TYPE_SINT64, dpb.FieldDescriptorProto_TYPE_SFIXED64: if i, ok := v.(int64); ok { return i, nil } if ui, ok := v.(uint64); ok { if ui > math.MaxInt64 { - return nil, errorWithPos(val.start(), "%vvalue %d is out of range for int64", mc, ui) + return nil, errorWithPos(val.Start(), "%vvalue %d is out of range for int64", mc, ui) } return int64(ui), nil } - return nil, errorWithPos(val.start(), "%vexpecting int64, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting int64, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_UINT64, dpb.FieldDescriptorProto_TYPE_FIXED64: if i, ok := v.(int64); ok { if i < 0 { - return nil, errorWithPos(val.start(), "%vvalue %d is out of range for uint64", mc, i) + return nil, errorWithPos(val.Start(), "%vvalue %d is out of range for uint64", mc, i) } return uint64(i), nil } if ui, ok := v.(uint64); ok { return ui, nil } - return nil, errorWithPos(val.start(), "%vexpecting uint64, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting uint64, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_DOUBLE: if d, ok := v.(float64); ok { return d, nil @@ -1389,11 +1392,11 @@ func fieldValue(res *parseResult, mc *messageContext, fld fldDescriptorish, val if u, ok := v.(uint64); ok { return float64(u), nil } - return nil, errorWithPos(val.start(), "%vexpecting double, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting double, got %s", mc, valueKind(v)) case dpb.FieldDescriptorProto_TYPE_FLOAT: if d, ok := v.(float64); ok { if (d > math.MaxFloat32 || d < -math.MaxFloat32) && !math.IsInf(d, 1) && !math.IsInf(d, -1) && !math.IsNaN(d) { - return nil, errorWithPos(val.start(), "%vvalue %f is out of range for float", mc, d) + return nil, errorWithPos(val.Start(), "%vvalue %f is out of range for float", mc, d) } return float32(d), nil } @@ -1403,8 +1406,8 @@ func fieldValue(res *parseResult, mc *messageContext, fld fldDescriptorish, val if u, ok := v.(uint64); ok { return float32(u), nil } - return nil, errorWithPos(val.start(), "%vexpecting float, got %s", mc, valueKind(v)) + return nil, errorWithPos(val.Start(), "%vexpecting float, got %s", mc, valueKind(v)) default: - return nil, errorWithPos(val.start(), "%vunrecognized field type: %s", mc, t) + return nil, errorWithPos(val.Start(), "%vunrecognized field type: %s", mc, t) } } diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go index 6025bd0dab2..bbfec8d1f64 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/parser.go @@ -17,6 +17,7 @@ import ( "github.com/jhump/protoreflect/desc" "github.com/jhump/protoreflect/desc/internal" + "github.com/jhump/protoreflect/desc/protoparse/ast" ) //go:generate goyacc -o proto.y.go -p proto proto.y @@ -118,6 +119,12 @@ type Parser struct { // desc.LoadFileDescriptor. LookupImport func(string) (*desc.FileDescriptor, error) + // LookupImportProto has the same functionality as LookupImport, however it returns + // a FileDescriptorProto instead of a FileDescriptor. + // + // It is an error to set both LookupImport and LookupImportProto. + LookupImportProto func(string) (*dpb.FileDescriptorProto, error) + // Used to create a reader for a given filename, when loading proto source // file contents. If unset, os.Open is used. 
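Aside (not part of the patch): the new LookupImportProto hook documented just above can be wired up roughly as in the sketch below. The registry map, the example.proto file name and the dpb import alias (the protoc-gen-go descriptor package protoreflect already uses) are assumptions for illustration; per the doc comment, LookupImport and LookupImportProto must not both be set.

package main

import (
	"fmt"
	"log"

	dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
	"github.com/jhump/protoreflect/desc/protoparse"
)

func main() {
	// Hypothetical registry of already-built descriptor protos for imports
	// that are not available as source files (names/contents are placeholders).
	registry := map[string]*dpb.FileDescriptorProto{}

	p := protoparse.Parser{
		ImportPaths: []string{"."},
		// New in this update: imports may be resolved directly to
		// FileDescriptorProtos. Setting LookupImport as well is an error.
		LookupImportProto: func(name string) (*dpb.FileDescriptorProto, error) {
			if fd, ok := registry[name]; ok {
				return fd, nil
			}
			return nil, fmt.Errorf("import %q not found in registry", name)
		},
	}

	fds, err := p.ParseFiles("example.proto") // placeholder file name
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("parsed", len(fds), "descriptor(s)")
}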
If ImportPaths is also empty // then relative paths are will be relative to the process's current working @@ -200,11 +207,20 @@ func (p Parser) ParseFiles(filenames ...string) ([]*desc.FileDescriptor, error) return nil, ret } } + lookupImport, err := p.getLookupImport() + if err != nil { + return nil, err + } protos := map[string]*parseResult{} - results := &parseResults{resultsByFilename: protos} + results := &parseResults{ + resultsByFilename: protos, + recursive: true, + validate: true, + createDescriptorProtos: true, + } errs := newErrorHandler(p.ErrorReporter, p.WarningReporter) - parseProtoFiles(accessor, filenames, errs, true, true, results, p.LookupImport) + parseProtoFiles(accessor, filenames, errs, results, lookupImport) if err := errs.getError(); err != nil { return nil, err } @@ -248,19 +264,19 @@ func (p Parser) ParseFiles(filenames ...string) ([]*desc.FileDescriptor, error) // scope in which the type reference appears. This goes for fields that // have message and enum types. It also applies to methods and their // references to request and response message types. -// 3. Enum fields are not known. Until a field's type reference is resolved -// (during linking), it is not known whether the type refers to a message -// or an enum. So all fields with such type references have their Type set -// to TYPE_MESSAGE. +// 3. Type references are not known. For non-scalar fields, until the type +// name is resolved (during linking), it is not known whether the type +// refers to a message or an enum. So all fields with such type references +// will not have their Type set, only the TypeName. // // This method will still validate the syntax of parsed files. If the parser's // ValidateUnlinkedFiles field is true, additional checks, beyond syntax will // also be performed. // -// If the Parser has no ErrorReporter set and a syntax or link error occurs, -// parsing will abort with the first such error encountered. If there is an +// If the Parser has no ErrorReporter set and a syntax error occurs, parsing +// will abort with the first such error encountered. If there is an // ErrorReporter configured and it returns non-nil, parsing will abort with the -// error it returns. If syntax or link errors are encountered but the configured +// error it returns. If syntax errors are encountered but the configured // ErrorReporter always returns nil, the parse fails with ErrInvalidSource. func (p Parser) ParseFilesButDoNotLink(filenames ...string) ([]*dpb.FileDescriptorProto, error) { accessor := p.Accessor @@ -269,10 +285,19 @@ func (p Parser) ParseFilesButDoNotLink(filenames ...string) ([]*dpb.FileDescript return os.Open(name) } } + lookupImport, err := p.getLookupImport() + if err != nil { + return nil, err + } protos := map[string]*parseResult{} errs := newErrorHandler(p.ErrorReporter, p.WarningReporter) - parseProtoFiles(accessor, filenames, errs, false, p.ValidateUnlinkedFiles, &parseResults{resultsByFilename: protos}, p.LookupImport) + results := &parseResults{ + resultsByFilename: protos, + validate: p.ValidateUnlinkedFiles, + createDescriptorProtos: true, + } + parseProtoFiles(accessor, filenames, errs, results, lookupImport) if err := errs.getError(); err != nil { return nil, err } @@ -301,6 +326,67 @@ func (p Parser) ParseFilesButDoNotLink(filenames ...string) ([]*dpb.FileDescript return fds, nil } +// ParseToAST parses the named files into ASTs, or Abstract Syntax Trees. 
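Aside (not part of the patch): the revised point (3) above says that non-scalar fields with unresolved type references now carry only a TypeName after ParseFilesButDoNotLink, with Type left unset until linking. A minimal sketch of inspecting that, using a made-up in-memory file and the generated descriptor getters:

package main

import (
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"strings"

	"github.com/jhump/protoreflect/desc/protoparse"
)

func main() {
	// Placeholder source: "OtherType" is non-scalar and unresolved, so after
	// ParseFilesButDoNotLink the field should carry only a type name.
	src := `
syntax = "proto3";
message Msg {
  OtherType field = 1;
}
`
	p := protoparse.Parser{
		Accessor: func(name string) (io.ReadCloser, error) {
			return ioutil.NopCloser(strings.NewReader(src)), nil
		},
	}
	fds, err := p.ParseFilesButDoNotLink("msg.proto")
	if err != nil {
		log.Fatal(err)
	}
	fld := fds[0].GetMessageType()[0].GetField()[0]
	fmt.Printf("type set: %v, type name: %q\n", fld.Type != nil, fld.GetTypeName())
}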
This +// is for consumers of proto files that don't care about compiling the files to +// descriptors, but care deeply about a non-lossy structured representation of +// the source (since descriptors are lossy). This includes formatting tools and +// possibly linters, too. +// +// If the requested filenames include standard imports (such as +// "google/protobuf/empty.proto") and no source is provided, the corresponding +// AST in the returned slice will be nil. These standard imports are only +// available for use as descriptors; no source is available unless it is +// provided by the configured Accessor. +// +// If the Parser has no ErrorReporter set and a syntax error occurs, parsing +// will abort with the first such error encountered. If there is an +// ErrorReporter configured and it returns non-nil, parsing will abort with the +// error it returns. If syntax errors are encountered but the configured +// ErrorReporter always returns nil, the parse fails with ErrInvalidSource. +func (p Parser) ParseToAST(filenames ...string) ([]*ast.FileNode, error) { + accessor := p.Accessor + if accessor == nil { + accessor = func(name string) (io.ReadCloser, error) { + return os.Open(name) + } + } + lookupImport, err := p.getLookupImport() + if err != nil { + return nil, err + } + + protos := map[string]*parseResult{} + errs := newErrorHandler(p.ErrorReporter, p.WarningReporter) + parseProtoFiles(accessor, filenames, errs, &parseResults{resultsByFilename: protos}, lookupImport) + if err := errs.getError(); err != nil { + return nil, err + } + ret := make([]*ast.FileNode, 0, len(filenames)) + for _, name := range filenames { + ret = append(ret, protos[name].root) + } + return ret, nil +} + +func (p Parser) getLookupImport() (func(string) (*dpb.FileDescriptorProto, error), error) { + if p.LookupImport != nil && p.LookupImportProto != nil { + return nil, ErrLookupImportAndProtoSet + } + if p.LookupImportProto != nil { + return p.LookupImportProto, nil + } + if p.LookupImport != nil { + return func(path string) (*dpb.FileDescriptorProto, error) { + value, err := p.LookupImport(path) + if value != nil { + return value.AsFileDescriptorProto(), err + } + return nil, err + }, nil + } + return nil, nil +} + func fixupFilenames(protos map[string]*parseResult) map[string]*parseResult { // In the event that the given filenames (keys in the supplied map) do not // match the actual paths used in 'import' statements in the files, we try @@ -409,21 +495,21 @@ func fixupFilenames(protos map[string]*parseResult) map[string]*parseResult { return revisedProtos } -func parseProtoFiles(acc FileAccessor, filenames []string, errs *errorHandler, recursive, validate bool, parsed *parseResults, lookupImport func(string) (*desc.FileDescriptor, error)) { +func parseProtoFiles(acc FileAccessor, filenames []string, errs *errorHandler, parsed *parseResults, lookupImport func(string) (*dpb.FileDescriptorProto, error)) { for _, name := range filenames { - parseProtoFile(acc, name, nil, errs, recursive, validate, parsed, lookupImport) + parseProtoFile(acc, name, nil, errs, parsed, lookupImport) if errs.err != nil { return } } } -func parseProtoFile(acc FileAccessor, filename string, importLoc *SourcePos, errs *errorHandler, recursive, validate bool, parsed *parseResults, lookupImport func(string) (*desc.FileDescriptor, error)) { - if parsed.has(filename) { +func parseProtoFile(acc FileAccessor, filename string, importLoc *SourcePos, errs *errorHandler, results *parseResults, lookupImport func(string) (*dpb.FileDescriptorProto, 
error)) { + if results.has(filename) { return } if lookupImport == nil { - lookupImport = func(string) (*desc.FileDescriptor, error) { + lookupImport = func(string) (*dpb.FileDescriptorProto, error) { return nil, errors.New("no import lookup function") } } @@ -437,12 +523,12 @@ func parseProtoFile(acc FileAccessor, filename string, importLoc *SourcePos, err // closing need not fail this operation _ = in.Close() }() - result = parseProto(filename, in, errs, validate) + result = parseProto(filename, in, errs, results.validate, results.createDescriptorProtos) }() } else if d, lookupErr := lookupImport(filename); lookupErr == nil { // This is a user-provided descriptor, which is acting similarly to a // well-known import. - result = &parseResult{fd: proto.Clone(d.AsFileDescriptorProto()).(*dpb.FileDescriptorProto)} + result = &parseResult{fd: proto.Clone(d).(*dpb.FileDescriptorProto)} } else if d, ok := standardImports[filename]; ok { // it's a well-known import // (we clone it to make sure we're not sharing state with other @@ -472,20 +558,20 @@ func parseProtoFile(acc FileAccessor, filename string, importLoc *SourcePos, err return } - parsed.add(filename, result) + results.add(filename, result) if errs.err != nil { return // abort } - if recursive { + if results.recursive { fd := result.fd decl := result.getFileNode(fd) - fnode, ok := decl.(*fileNode) + fnode, ok := decl.(*ast.FileNode) if !ok { // no AST for this file? use imports in descriptor for _, dep := range fd.Dependency { - parseProtoFile(acc, dep, decl.start(), errs, true, validate, parsed, lookupImport) + parseProtoFile(acc, dep, decl.Start(), errs, results, lookupImport) if errs.getError() != nil { return // abort } @@ -493,10 +579,12 @@ func parseProtoFile(acc FileAccessor, filename string, importLoc *SourcePos, err return } // we have an AST; use it so we can report import location in errors - for _, dep := range fnode.imports { - parseProtoFile(acc, dep.name.val, dep.name.start(), errs, true, validate, parsed, lookupImport) - if errs.getError() != nil { - return // abort + for _, decl := range fnode.Decls { + if dep, ok := decl.(*ast.ImportNode); ok { + parseProtoFile(acc, dep.Name.AsString(), dep.Name.Start(), errs, results, lookupImport) + if errs.getError() != nil { + return // abort + } } } } @@ -505,6 +593,8 @@ func parseProtoFile(acc FileAccessor, filename string, importLoc *SourcePos, err type parseResults struct { resultsByFilename map[string]*parseResult filenames []string + + recursive, validate, createDescriptorProtos bool } func (r *parseResults) has(filename string) bool { @@ -522,6 +612,8 @@ type parseResult struct { // or validation errs *errorHandler + // the root of the AST + root *ast.FileNode // the parsed file descriptor fd *dpb.FileDescriptorProto @@ -532,141 +624,149 @@ type parseResult struct { // a map of elements in the descriptor to nodes in the AST // (for extracting position information when validating the descriptor) - nodes map[proto.Message]node + nodes map[proto.Message]ast.Node // a map of uninterpreted option AST nodes to their relative path // in the resulting options message - interpretedOptions map[*optionNode][]int32 + interpretedOptions map[*ast.OptionNode][]int32 } -func (r *parseResult) getFileNode(f *dpb.FileDescriptorProto) fileDecl { +func (r *parseResult) getFileNode(f *dpb.FileDescriptorProto) ast.FileDeclNode { if r.nodes == nil { - return noSourceNode{pos: unknownPos(f.GetName())} + return ast.NewNoSourceNode(f.GetName()) } - return r.nodes[f].(fileDecl) + return 
r.nodes[f].(ast.FileDeclNode) } -func (r *parseResult) getOptionNode(o *dpb.UninterpretedOption) optionDecl { +func (r *parseResult) getOptionNode(o *dpb.UninterpretedOption) ast.OptionDeclNode { if r.nodes == nil { - return noSourceNode{pos: unknownPos(r.fd.GetName())} + return ast.NewNoSourceNode(r.fd.GetName()) } - return r.nodes[o].(optionDecl) + return r.nodes[o].(ast.OptionDeclNode) } -func (r *parseResult) getOptionNamePartNode(o *dpb.UninterpretedOption_NamePart) node { +func (r *parseResult) getOptionNamePartNode(o *dpb.UninterpretedOption_NamePart) ast.Node { if r.nodes == nil { - return noSourceNode{pos: unknownPos(r.fd.GetName())} + return ast.NewNoSourceNode(r.fd.GetName()) } return r.nodes[o] } -func (r *parseResult) getFieldNode(f *dpb.FieldDescriptorProto) fieldDecl { +func (r *parseResult) getFieldNode(f *dpb.FieldDescriptorProto) ast.FieldDeclNode { if r.nodes == nil { - return noSourceNode{pos: unknownPos(r.fd.GetName())} + return ast.NewNoSourceNode(r.fd.GetName()) } - return r.nodes[f].(fieldDecl) + return r.nodes[f].(ast.FieldDeclNode) } -func (r *parseResult) getExtensionRangeNode(e *dpb.DescriptorProto_ExtensionRange) rangeDecl { +func (r *parseResult) getExtensionRangeNode(e *dpb.DescriptorProto_ExtensionRange) ast.RangeDeclNode { if r.nodes == nil { - return noSourceNode{pos: unknownPos(r.fd.GetName())} + return ast.NewNoSourceNode(r.fd.GetName()) } - return r.nodes[e].(rangeDecl) + return r.nodes[e].(ast.RangeDeclNode) } -func (r *parseResult) getMessageReservedRangeNode(rr *dpb.DescriptorProto_ReservedRange) rangeDecl { +func (r *parseResult) getMessageReservedRangeNode(rr *dpb.DescriptorProto_ReservedRange) ast.RangeDeclNode { if r.nodes == nil { - return noSourceNode{pos: unknownPos(r.fd.GetName())} + return ast.NewNoSourceNode(r.fd.GetName()) } - return r.nodes[rr].(rangeDecl) + return r.nodes[rr].(ast.RangeDeclNode) } -func (r *parseResult) getEnumNode(e *dpb.EnumDescriptorProto) node { +func (r *parseResult) getEnumNode(e *dpb.EnumDescriptorProto) ast.Node { if r.nodes == nil { - return noSourceNode{pos: unknownPos(r.fd.GetName())} + return ast.NewNoSourceNode(r.fd.GetName()) } return r.nodes[e] } -func (r *parseResult) getEnumValueNode(e *dpb.EnumValueDescriptorProto) enumValueDecl { +func (r *parseResult) getEnumValueNode(e *dpb.EnumValueDescriptorProto) ast.EnumValueDeclNode { if r.nodes == nil { - return noSourceNode{pos: unknownPos(r.fd.GetName())} + return ast.NewNoSourceNode(r.fd.GetName()) } - return r.nodes[e].(enumValueDecl) + return r.nodes[e].(ast.EnumValueDeclNode) } -func (r *parseResult) getEnumReservedRangeNode(rr *dpb.EnumDescriptorProto_EnumReservedRange) rangeDecl { +func (r *parseResult) getEnumReservedRangeNode(rr *dpb.EnumDescriptorProto_EnumReservedRange) ast.RangeDeclNode { if r.nodes == nil { - return noSourceNode{pos: unknownPos(r.fd.GetName())} + return ast.NewNoSourceNode(r.fd.GetName()) } - return r.nodes[rr].(rangeDecl) + return r.nodes[rr].(ast.RangeDeclNode) } -func (r *parseResult) getMethodNode(m *dpb.MethodDescriptorProto) methodDecl { +func (r *parseResult) getMethodNode(m *dpb.MethodDescriptorProto) ast.RPCDeclNode { if r.nodes == nil { - return noSourceNode{pos: unknownPos(r.fd.GetName())} + return ast.NewNoSourceNode(r.fd.GetName()) } - return r.nodes[m].(methodDecl) + return r.nodes[m].(ast.RPCDeclNode) } -func (r *parseResult) putFileNode(f *dpb.FileDescriptorProto, n *fileNode) { +func (r *parseResult) putFileNode(f *dpb.FileDescriptorProto, n *ast.FileNode) { r.nodes[f] = n } -func (r *parseResult) putOptionNode(o 
*dpb.UninterpretedOption, n *optionNode) { +func (r *parseResult) putOptionNode(o *dpb.UninterpretedOption, n *ast.OptionNode) { r.nodes[o] = n } -func (r *parseResult) putOptionNamePartNode(o *dpb.UninterpretedOption_NamePart, n *optionNamePartNode) { +func (r *parseResult) putOptionNamePartNode(o *dpb.UninterpretedOption_NamePart, n *ast.FieldReferenceNode) { r.nodes[o] = n } -func (r *parseResult) putMessageNode(m *dpb.DescriptorProto, n msgDecl) { +func (r *parseResult) putMessageNode(m *dpb.DescriptorProto, n ast.MessageDeclNode) { r.nodes[m] = n } -func (r *parseResult) putFieldNode(f *dpb.FieldDescriptorProto, n fieldDecl) { +func (r *parseResult) putFieldNode(f *dpb.FieldDescriptorProto, n ast.FieldDeclNode) { r.nodes[f] = n } -func (r *parseResult) putOneOfNode(o *dpb.OneofDescriptorProto, n *oneOfNode) { +func (r *parseResult) putOneOfNode(o *dpb.OneofDescriptorProto, n *ast.OneOfNode) { r.nodes[o] = n } -func (r *parseResult) putExtensionRangeNode(e *dpb.DescriptorProto_ExtensionRange, n *rangeNode) { +func (r *parseResult) putExtensionRangeNode(e *dpb.DescriptorProto_ExtensionRange, n *ast.RangeNode) { r.nodes[e] = n } -func (r *parseResult) putMessageReservedRangeNode(rr *dpb.DescriptorProto_ReservedRange, n *rangeNode) { +func (r *parseResult) putMessageReservedRangeNode(rr *dpb.DescriptorProto_ReservedRange, n *ast.RangeNode) { r.nodes[rr] = n } -func (r *parseResult) putEnumNode(e *dpb.EnumDescriptorProto, n *enumNode) { +func (r *parseResult) putEnumNode(e *dpb.EnumDescriptorProto, n *ast.EnumNode) { r.nodes[e] = n } -func (r *parseResult) putEnumValueNode(e *dpb.EnumValueDescriptorProto, n *enumValueNode) { +func (r *parseResult) putEnumValueNode(e *dpb.EnumValueDescriptorProto, n *ast.EnumValueNode) { r.nodes[e] = n } -func (r *parseResult) putEnumReservedRangeNode(rr *dpb.EnumDescriptorProto_EnumReservedRange, n *rangeNode) { +func (r *parseResult) putEnumReservedRangeNode(rr *dpb.EnumDescriptorProto_EnumReservedRange, n *ast.RangeNode) { r.nodes[rr] = n } -func (r *parseResult) putServiceNode(s *dpb.ServiceDescriptorProto, n *serviceNode) { +func (r *parseResult) putServiceNode(s *dpb.ServiceDescriptorProto, n *ast.ServiceNode) { r.nodes[s] = n } -func (r *parseResult) putMethodNode(m *dpb.MethodDescriptorProto, n *methodNode) { +func (r *parseResult) putMethodNode(m *dpb.MethodDescriptorProto, n *ast.RPCNode) { r.nodes[m] = n } -func parseProto(filename string, r io.Reader, errs *errorHandler, validate bool) *parseResult { +func parseProto(filename string, r io.Reader, errs *errorHandler, validate, createProtos bool) *parseResult { beforeErrs := errs.errsReported lx := newLexer(r, filename, errs) protoParse(lx) - - res := createParseResult(filename, lx.res, errs) + if lx.res == nil || len(lx.res.Children()) == 0 { + // nil AST means there was an error that prevented any parsing + // or the file was empty; synthesize empty non-nil AST + lx.res = ast.NewEmptyFileNode(filename) + } + if lx.eof != nil { + lx.res.FinalComments = lx.eof.LeadingComments() + lx.res.FinalWhitespace = lx.eof.LeadingWhitespace() + } + res := createParseResult(filename, lx.res, errs, createProtos) if validate && errs.err == nil { validateBasic(res, errs.errsReported > beforeErrs) } @@ -674,35 +774,19 @@ func parseProto(filename string, r io.Reader, errs *errorHandler, validate bool) return res } -func createParseResult(filename string, file *fileNode, errs *errorHandler) *parseResult { +func createParseResult(filename string, file *ast.FileNode, errs *errorHandler, createProtos bool) *parseResult 
{ res := &parseResult{ errs: errs, - nodes: map[proto.Message]node{}, - interpretedOptions: map[*optionNode][]int32{}, + root: file, + nodes: map[proto.Message]ast.Node{}, + interpretedOptions: map[*ast.OptionNode][]int32{}, } - if file == nil { - // nil AST means there was an error that prevented any parsing - // or the file was empty; synthesize empty non-nil AST - file = &fileNode{} - n := noSourceNode{pos: unknownPos(filename)} - file.setRange(&n, &n) + if createProtos { + res.createFileDescriptor(filename, file) } - res.createFileDescriptor(filename, file) return res } -func toNameParts(ident *compoundIdentNode) []*optionNamePartNode { - parts := strings.Split(ident.val, ".") - ret := make([]*optionNamePartNode, len(parts)) - offset := 0 - for i, p := range parts { - ret[i] = &optionNamePartNode{text: ident, offset: offset, length: len(p)} - ret[i].setRange(ident, ident) - offset += len(p) + 1 - } - return ret -} - func checkTag(pos *SourcePos, v uint64, maxTag int32) error { if v < 1 { return errorWithPos(pos, "tag number %d must be greater than zero", v) @@ -714,59 +798,68 @@ func checkTag(pos *SourcePos, v uint64, maxTag int32) error { return nil } -func checkExtensionTagsInFile(fd *desc.FileDescriptor, res *parseResult) error { +func checkExtensionsInFile(fd *desc.FileDescriptor, res *parseResult) error { for _, fld := range fd.GetExtensions() { - if err := checkExtensionTag(fld, res); err != nil { + if err := checkExtension(fld, res); err != nil { return err } } for _, md := range fd.GetMessageTypes() { - if err := checkExtensionTagsInMessage(md, res); err != nil { + if err := checkExtensionsInMessage(md, res); err != nil { return err } } return nil } -func checkExtensionTagsInMessage(md *desc.MessageDescriptor, res *parseResult) error { +func checkExtensionsInMessage(md *desc.MessageDescriptor, res *parseResult) error { for _, fld := range md.GetNestedExtensions() { - if err := checkExtensionTag(fld, res); err != nil { + if err := checkExtension(fld, res); err != nil { return err } } for _, nmd := range md.GetNestedMessageTypes() { - if err := checkExtensionTagsInMessage(nmd, res); err != nil { + if err := checkExtensionsInMessage(nmd, res); err != nil { return err } } return nil } -func checkExtensionTag(fld *desc.FieldDescriptor, res *parseResult) error { - // NB: This is kind of gross that we don't enforce this in validateBasic(). But it would - // require doing some minimal linking there (to identify the extendee and locate its - // descriptor). To keep the code simpler, we just wait until things are fully linked. - - // In validateBasic() we just made sure these were within bounds for any message. But - // now that things are linked, we can check if the extendee is messageset wire format - // and, if not, enforce tighter limit. - if !fld.GetOwner().GetMessageOptions().GetMessageSetWireFormat() && fld.GetNumber() > internal.MaxNormalTag { - pos := res.nodes[fld.AsFieldDescriptorProto()].(fieldDecl).fieldTag().start() - return errorWithPos(pos, "tag number %d is higher than max allowed tag number (%d)", fld.GetNumber(), internal.MaxNormalTag) +func checkExtension(fld *desc.FieldDescriptor, res *parseResult) error { + // NB: It's a little gross that we don't enforce these in validateBasic(). + // But requires some minimal linking to resolve the extendee, so we can + // interrogate its descriptor. 
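Aside (not part of the patch): the stricter extension checks introduced by the comment above appear in the checkExtension hunk that follows. The sketch below is a speculative illustration of input the new message-set rule is intended to reject, fed through the parser with an in-memory Accessor; the file contents, names and the expectation about the reported error are assumptions, not guarantees.

package main

import (
	"fmt"
	"io"
	"io/ioutil"
	"strings"

	"github.com/jhump/protoreflect/desc/protoparse"
)

func main() {
	// A proto2 extendee using message-set wire format, extended with a scalar
	// type; the updated checkExtension is expected to reject this during
	// linking (exact error wording may differ).
	src := `
syntax = "proto2";
message Holder {
  option message_set_wire_format = true;
  extensions 4 to max;
}
extend Holder {
  optional int32 not_allowed = 4;
}
`
	p := protoparse.Parser{
		Accessor: func(name string) (io.ReadCloser, error) {
			if name == "holder.proto" {
				return ioutil.NopCloser(strings.NewReader(src)), nil
			}
			return nil, fmt.Errorf("unknown file %q", name)
		},
	}
	_, err := p.ParseFiles("holder.proto")
	fmt.Println("link error:", err) // expected: scalar extension of a message-set type
}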
+ if fld.GetOwner().GetMessageOptions().GetMessageSetWireFormat() { + // Message set wire format requires that all extensions be messages + // themselves (no scalar extensions) + if fld.GetType() != dpb.FieldDescriptorProto_TYPE_MESSAGE { + pos := res.getFieldNode(fld.AsFieldDescriptorProto()).FieldType().Start() + return errorWithPos(pos, "messages with message-set wire format cannot contain scalar extensions, only messages") + } + } else { + // In validateBasic() we just made sure these were within bounds for any message. But + // now that things are linked, we can check if the extendee is messageset wire format + // and, if not, enforce tighter limit. + if fld.GetNumber() > internal.MaxNormalTag { + pos := res.getFieldNode(fld.AsFieldDescriptorProto()).FieldTag().Start() + return errorWithPos(pos, "tag number %d is higher than max allowed tag number (%d)", fld.GetNumber(), internal.MaxNormalTag) + } } + return nil } -func aggToString(agg []*aggregateEntryNode, buf *bytes.Buffer) { +func aggToString(agg []*ast.MessageFieldNode, buf *bytes.Buffer) { buf.WriteString("{") for _, a := range agg { buf.WriteString(" ") - buf.WriteString(a.name.value()) - if v, ok := a.val.(*aggregateLiteralNode); ok { - aggToString(v.elements, buf) + buf.WriteString(a.Name.Value()) + if v, ok := a.Val.(*ast.MessageLiteralNode); ok { + aggToString(v.Elements, buf) } else { buf.WriteString(": ") - elementToString(a.val.value(), buf) + elementToString(a.Val.Value(), buf) } } buf.WriteString(" }") @@ -774,7 +867,7 @@ func aggToString(agg []*aggregateEntryNode, buf *bytes.Buffer) { func elementToString(v interface{}, buf *bytes.Buffer) { switch v := v.(type) { - case bool, int64, uint64, identifier: + case bool, int64, uint64, ast.Identifier: _, _ = fmt.Fprintf(buf, "%v", v) case float64: if math.IsInf(v, 1) { @@ -790,7 +883,7 @@ func elementToString(v interface{}, buf *bytes.Buffer) { buf.WriteRune('"') writeEscapedBytes(buf, []byte(v)) buf.WriteRune('"') - case []valueNode: + case []ast.ValueNode: buf.WriteString(": [") first := true for _, e := range v { @@ -799,10 +892,10 @@ func elementToString(v interface{}, buf *bytes.Buffer) { } else { buf.WriteString(", ") } - elementToString(e.value(), buf) + elementToString(e.Value(), buf) } buf.WriteString("]") - case []*aggregateEntryNode: + case []*ast.MessageFieldNode: aggToString(v, buf) } } diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y index b7320707b40..63b32d9d6db 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y @@ -4,8 +4,9 @@ package protoparse //lint:file-ignore SA4006 generated parser has unused values import ( - "fmt" "math" + + "github.com/jhump/protoreflect/desc/protoparse/ast" ) %} @@ -13,47 +14,60 @@ import ( // fields inside this union end up as the fields in a structure known // as ${PREFIX}SymType, of which a reference is passed to the lexer. 
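Aside (not part of the patch): as the comment above notes, goyacc turns the %union that follows into fields of a generated protoSymType struct (the prefix comes from the go:generate goyacc -p proto directive in parser.go). The sketch below approximates a small slice of that generated type; it is illustrative only and reuses just a few field names from the union.

package main

import (
	"fmt"

	"github.com/jhump/protoreflect/desc/protoparse/ast"
)

// protoSymType is an abbreviated sketch of the struct goyacc generates from
// the %union below ("goyacc -p proto" yields the "proto" prefix); only a few
// of the union fields are shown here.
type protoSymType struct {
	yys  int                    // parser bookkeeping added by goyacc
	file *ast.FileNode          // the "file" union field
	s    *ast.StringLiteralNode // the "s" union field
	b    *ast.RuneNode          // the "b" union field
	err  error                  // the "err" union field
}

func main() {
	// The lexer helpers earlier in this patch populate fields such as lval.s
	// and lval.b on a value of the (real) generated protoSymType.
	fmt.Printf("%+v\n", protoSymType{})
}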
%union{ - file *fileNode - fileDecls []*fileElement - syn *syntaxNode - pkg *packageNode - imprt *importNode - msg *messageNode - msgDecls []*messageElement - fld *fieldNode - mapFld *mapFieldNode - mapType *mapTypeNode - grp *groupNode - oo *oneOfNode - ooDecls []*oneOfElement - ext *extensionRangeNode - resvd *reservedNode - en *enumNode - enDecls []*enumElement - env *enumValueNode - extend *extendNode - extDecls []*extendElement - svc *serviceNode - svcDecls []*serviceElement - mtd *methodNode - rpcType *rpcTypeNode - opts []*optionNode - optNm []*optionNamePartNode - cmpctOpts *compactOptionsNode - rngs []*rangeNode - names []*compoundStringNode - cid *compoundIdentNode - sl []valueNode - agg []*aggregateEntryNode - aggName *aggregateNameNode - v valueNode - il *compoundIntNode - str *compoundStringNode - s *stringLiteralNode - i *intLiteralNode - f *floatLiteralNode - id *identNode - b *basicNode + file *ast.FileNode + syn *ast.SyntaxNode + fileDecl ast.FileElement + fileDecls []ast.FileElement + pkg *ast.PackageNode + imprt *ast.ImportNode + msg *ast.MessageNode + msgDecl ast.MessageElement + msgDecls []ast.MessageElement + fld *ast.FieldNode + mapFld *ast.MapFieldNode + mapType *ast.MapTypeNode + grp *ast.GroupNode + oo *ast.OneOfNode + ooDecl ast.OneOfElement + ooDecls []ast.OneOfElement + ext *ast.ExtensionRangeNode + resvd *ast.ReservedNode + en *ast.EnumNode + enDecl ast.EnumElement + enDecls []ast.EnumElement + env *ast.EnumValueNode + extend *ast.ExtendNode + extDecl ast.ExtendElement + extDecls []ast.ExtendElement + svc *ast.ServiceNode + svcDecl ast.ServiceElement + svcDecls []ast.ServiceElement + mtd *ast.RPCNode + rpcType *ast.RPCTypeNode + rpcDecl ast.RPCElement + rpcDecls []ast.RPCElement + opt *ast.OptionNode + opts *compactOptionList + ref *ast.FieldReferenceNode + optNms *fieldRefList + cmpctOpts *ast.CompactOptionsNode + rng *ast.RangeNode + rngs *rangeList + names *nameList + cid *identList + tid ast.IdentValueNode + sl *valueList + msgField *ast.MessageFieldNode + msgEntry *messageFieldEntry + msgLit *messageFieldList + v ast.ValueNode + il ast.IntValueNode + str *stringList + s *ast.StringLiteralNode + i *ast.UintLiteralNode + f *ast.FloatLiteralNode + id *ast.IdentNode + b *ast.RuneNode err error } @@ -61,45 +75,58 @@ import ( // really a field name in the above union struct %type file %type syntax -%type fileDecl fileDecls +%type fileDecl +%type fileDecls %type import %type package -%type option compactOption compactOptionDecls rpcOption rpcOptions -%type optionName optionNameComponent +%type option compactOption +%type compactOptionDecls +%type rpcDecl +%type rpcDecls +%type optionNameComponent aggName +%type optionName %type compactOptions %type constant scalarConstant aggregate numLit %type intLit -%type name keyType -%type ident typeIdent -%type aggName +%type name keyType msgElementName extElementName oneofElementName enumElementName +%type ident msgElementIdent extElementIdent oneofElementIdent +%type typeIdent msgElementTypeIdent extElementTypeIdent oneofElementTypeIdent %type constantList -%type aggFields aggField aggFieldEntry -%type field oneofField +%type aggFieldEntry +%type aggField +%type aggFields +%type oneofField msgField extField %type oneof %type group oneofGroup %type mapField %type mapType %type message -%type messageItem messageBody -%type oneofItem oneofBody +%type messageDecl +%type messageDecls +%type ooDecl +%type ooDecls %type fieldNames %type msgReserved enumReserved reservedNames -%type tagRange tagRanges enumRange enumRanges +%type 
tagRange enumRange +%type tagRanges enumRanges %type extensions %type enum -%type enumItem enumBody -%type enumField +%type enumDecl +%type enumDecls +%type enumValue %type extend -%type extendItem extendBody +%type extendDecl +%type extendDecls %type stringLit %type service -%type serviceItem serviceBody +%type serviceDecl +%type serviceDecls %type rpc %type rpcType // same for terminals -%token _STRING_LIT -%token _INT_LIT +%token _STRING_LIT +%token _INT_LIT %token _FLOAT_LIT %token _NAME %token _SYNTAX _IMPORT _WEAK _PUBLIC _PACKAGE _OPTION _TRUE _FALSE _INF _NAN _REPEATED _OPTIONAL _REQUIRED @@ -114,478 +141,525 @@ import ( %% file : syntax { - $$ = &fileNode{syntax: $1} - $$.setRange($1, $1) + $$ = ast.NewFileNode($1, nil) protolex.(*protoLex).res = $$ } | fileDecls { - $$ = &fileNode{decls: $1} - if len($1) > 0 { - $$.setRange($1[0], $1[len($1)-1]) - } + $$ = ast.NewFileNode(nil, $1) protolex.(*protoLex).res = $$ } | syntax fileDecls { - $$ = &fileNode{syntax: $1, decls: $2} - var end node - if len($2) > 0 { - end = $2[len($2)-1] - } else { - end = $1 - } - $$.setRange($1, end) + $$ = ast.NewFileNode($1, $2) protolex.(*protoLex).res = $$ } | { } fileDecls : fileDecls fileDecl { - $$ = append($1, $2...) + if $2 != nil { + $$ = append($1, $2) + } else { + $$ = $1 + } + } + | fileDecl { + if $1 != nil { + $$ = []ast.FileElement{$1} + } else { + $$ = nil + } } - | fileDecl fileDecl : import { - $$ = []*fileElement{{imp: $1}} + $$ = $1 } | package { - $$ = []*fileElement{{pkg: $1}} + $$ = $1 } | option { - $$ = []*fileElement{{option: $1[0]}} + $$ = $1 } | message { - $$ = []*fileElement{{message: $1}} + $$ = $1 } | enum { - $$ = []*fileElement{{enum: $1}} + $$ = $1 } | extend { - $$ = []*fileElement{{extend: $1}} + $$ = $1 } | service { - $$ = []*fileElement{{service: $1}} + $$ = $1 } | ';' { - $$ = []*fileElement{{empty: $1}} + $$ = ast.NewEmptyDeclNode($1) } | error ';' { + $$ = nil } | error { + $$ = nil } syntax : _SYNTAX '=' stringLit ';' { - $$ = &syntaxNode{syntax: $3} - $$.setRange($1, $4) + $$ = ast.NewSyntaxNode($1.ToKeyword(), $2, $3.toStringValueNode(), $4) } import : _IMPORT stringLit ';' { - $$ = &importNode{ name: $2 } - $$.setRange($1, $3) + $$ = ast.NewImportNode($1.ToKeyword(), nil, nil, $2.toStringValueNode(), $3) } | _IMPORT _WEAK stringLit ';' { - $$ = &importNode{ name: $3, weak: true } - $$.setRange($1, $4) + $$ = ast.NewImportNode($1.ToKeyword(), nil, $2.ToKeyword(), $3.toStringValueNode(), $4) } | _IMPORT _PUBLIC stringLit ';' { - $$ = &importNode{ name: $3, public: true } - $$.setRange($1, $4) + $$ = ast.NewImportNode($1.ToKeyword(), $2.ToKeyword(), nil, $3.toStringValueNode(), $4) } package : _PACKAGE ident ';' { - $$ = &packageNode{name: $2} - $$.setRange($1, $3) + $$ = ast.NewPackageNode($1.ToKeyword(), $2.toIdentValueNode(nil), $3) } ident : name { - $$ = &compoundIdentNode{val: $1.val} - $$.setRange($1, $1) - } - | ident '.' name { - $$ = &compoundIdentNode{val: $1.val + "." + $3.val} - $$.setRange($1, $3) + $$ = &identList{$1, nil, nil} + } + | name '.' ident { + $$ = &identList{$1, $2, $3} } -option : _OPTION optionName '=' constant ';' { - n := &optionNameNode{parts: $2} - n.setRange($2[0], $2[len($2)-1]) - o := &optionNode{name: n, val: $4} - o.setRange($1, $5) - $$ = []*optionNode{o} +// to mimic limitations of protoc recursive-descent parser, +// we don't allowed message statement keywords as identifiers +// (or oneof statement keywords [e.g. 
"option"] below) + +msgElementIdent : msgElementName { + $$ = &identList{$1, nil, nil} + } + | msgElementName '.' ident { + $$ = &identList{$1, $2, $3} } -optionName : optionNameComponent - | - optionName '.' optionNameComponent { - $$ = append($1, $3...) +extElementIdent : extElementName { + $$ = &identList{$1, nil, nil} + } + | extElementName '.' ident { + $$ = &identList{$1, $2, $3} } +oneofElementIdent : oneofElementName { + $$ = &identList{$1, nil, nil} + } + | oneofElementName '.' ident { + $$ = &identList{$1, $2, $3} + } + +option : _OPTION optionName '=' constant ';' { + refs, dots := $2.toNodes() + optName := ast.NewOptionNameNode(refs, dots) + $$ = ast.NewOptionNode($1.ToKeyword(), optName, $3, $4, $5) + } + +optionName : optionNameComponent { + $$ = &fieldRefList{$1, nil, nil} + } + | optionNameComponent '.' optionName { + $$ = &fieldRefList{$1, $2, $3} + } optionNameComponent : name { - nm := &compoundIdentNode{val: $1.val} - nm.setRange($1, $1) - $$ = toNameParts(nm) + $$ = ast.NewFieldReferenceNode($1) } | '(' typeIdent ')' { - p := &optionNamePartNode{text: $2, isExtension: true} - p.setRange($1, $3) - $$ = []*optionNamePartNode{p} + $$ = ast.NewExtensionFieldReferenceNode($1, $2, $3) } constant : scalarConstant | aggregate scalarConstant : stringLit { - $$ = $1 + $$ = $1.toStringValueNode() } | numLit | name { - if $1.val == "true" { - $$ = &boolLiteralNode{identNode: $1, val: true} - } else if $1.val == "false" { - $$ = &boolLiteralNode{identNode: $1, val: false} - } else if $1.val == "inf" { - f := &compoundFloatNode{val: math.Inf(1)} - f.setRange($1, $1) - $$ = f - } else if $1.val == "nan" { - f := &compoundFloatNode{val: math.NaN()} - f.setRange($1, $1) - $$ = f + if $1.Val == "true" || $1.Val == "false" { + $$ = ast.NewBoolLiteralNode($1.ToKeyword()) + } else if $1.Val == "inf" || $1.Val == "nan" { + $$ = ast.NewSpecialFloatLiteralNode($1.ToKeyword()) } else { $$ = $1 } } numLit : _FLOAT_LIT { - $$ = $1 - } + $$ = $1 + } | '-' _FLOAT_LIT { - f := &compoundFloatNode{val: -$2.val} - f.setRange($1, $2) - $$ = f + $$ = ast.NewSignedFloatLiteralNode($1, $2) } | '+' _FLOAT_LIT { - f := &compoundFloatNode{val: $2.val} - f.setRange($1, $2) - $$ = f + $$ = ast.NewSignedFloatLiteralNode($1, $2) } | '+' _INF { - f := &compoundFloatNode{val: math.Inf(1)} - f.setRange($1, $2) - $$ = f + f := ast.NewSpecialFloatLiteralNode($2.ToKeyword()) + $$ = ast.NewSignedFloatLiteralNode($1, f) } | '-' _INF { - f := &compoundFloatNode{val: math.Inf(-1)} - f.setRange($1, $2) - $$ = f + f := ast.NewSpecialFloatLiteralNode($2.ToKeyword()) + $$ = ast.NewSignedFloatLiteralNode($1, f) } | _INT_LIT { - $$ = $1 - } - | '+' _INT_LIT { - i := &compoundUintNode{val: $2.val} - i.setRange($1, $2) - $$ = i - } - | '-' _INT_LIT { - if $2.val > math.MaxInt64 + 1 { - // can't represent as int so treat as float literal - f := &compoundFloatNode{val: -float64($2.val)} - f.setRange($1, $2) - $$ = f - } else { - i := &compoundIntNode{val: -int64($2.val)} - i.setRange($1, $2) - $$ = i - } - } + $$ = $1 + } + | '+' _INT_LIT { + $$ = ast.NewPositiveUintLiteralNode($1, $2) + } + | '-' _INT_LIT { + if $2.Val > math.MaxInt64 + 1 { + // can't represent as int so treat as float literal + $$ = ast.NewSignedFloatLiteralNode($1, $2) + } else { + $$ = ast.NewNegativeIntLiteralNode($1, $2) + } + } stringLit : _STRING_LIT { - $$ = &compoundStringNode{val: $1.val} - $$.setRange($1, $1) - } - | stringLit _STRING_LIT { - $$ = &compoundStringNode{val: $1.val + $2.val} - $$.setRange($1, $2) - } + $$ = &stringList{$1, nil} + } + | 
_STRING_LIT stringLit { + $$ = &stringList{$1, $2} + } aggregate : '{' aggFields '}' { - a := &aggregateLiteralNode{elements: $2} - a.setRange($1, $3) - $$ = a + fields, delims := $2.toNodes() + $$ = ast.NewMessageLiteralNode($1, fields, delims, $3) } -aggFields : aggField - | aggFields aggField { - $$ = append($1, $2...) +aggFields : aggField { + if $1 != nil { + $$ = &messageFieldList{$1, nil} + } else { + $$ = nil + } + } + | aggField aggFields { + if $1 != nil { + $$ = &messageFieldList{$1, $2} + } else { + $$ = $2 + } } | { $$ = nil } -aggField : aggFieldEntry +aggField : aggFieldEntry { + if $1 != nil { + $$ = &messageFieldEntry{$1, nil} + } else { + $$ = nil + } + } | aggFieldEntry ',' { - $$ = $1 + if $1 != nil { + $$ = &messageFieldEntry{$1, $2} + } else { + $$ = nil + } } | aggFieldEntry ';' { - $$ = $1 + if $1 != nil { + $$ = &messageFieldEntry{$1, $2} + } else { + $$ = nil + } } | error ',' { + $$ = nil } | error ';' { + $$ = nil } | error { + $$ = nil } aggFieldEntry : aggName ':' scalarConstant { - a := &aggregateEntryNode{name: $1, val: $3} - a.setRange($1, $3) - $$ = []*aggregateEntryNode{a} + if $1 != nil { + $$ = ast.NewMessageFieldNode($1, $2, $3) + } else { + $$ = nil + } } | aggName ':' '[' ']' { - s := &sliceLiteralNode{} - s.setRange($3, $4) - a := &aggregateEntryNode{name: $1, val: s} - a.setRange($1, $4) - $$ = []*aggregateEntryNode{a} + if $1 != nil { + val := ast.NewArrayLiteralNode($3, nil, nil, $4) + $$ = ast.NewMessageFieldNode($1, $2, val) + } else { + $$ = nil + } } | aggName ':' '[' constantList ']' { - s := &sliceLiteralNode{elements: $4} - s.setRange($3, $5) - a := &aggregateEntryNode{name: $1, val: s} - a.setRange($1, $5) - $$ = []*aggregateEntryNode{a} + if $1 != nil { + vals, commas := $4.toNodes() + val := ast.NewArrayLiteralNode($3, vals, commas, $5) + $$ = ast.NewMessageFieldNode($1, $2, val) + } else { + $$ = nil + } } | aggName ':' '[' error ']' { + $$ = nil } | aggName ':' aggregate { - a := &aggregateEntryNode{name: $1, val: $3} - a.setRange($1, $3) - $$ = []*aggregateEntryNode{a} + if $1 != nil { + $$ = ast.NewMessageFieldNode($1, $2, $3) + } else { + $$ = nil + } } | aggName aggregate { - a := &aggregateEntryNode{name: $1, val: $2} - a.setRange($1, $2) - $$ = []*aggregateEntryNode{a} + if $1 != nil { + $$ = ast.NewMessageFieldNode($1, nil, $2) + } else { + $$ = nil + } } | aggName ':' '<' aggFields '>' { - s := &aggregateLiteralNode{elements: $4} - s.setRange($3, $5) - a := &aggregateEntryNode{name: $1, val: s} - a.setRange($1, $5) - $$ = []*aggregateEntryNode{a} + if $1 != nil { + fields, delims := $4.toNodes() + msg := ast.NewMessageLiteralNode($3, fields, delims, $5) + $$ = ast.NewMessageFieldNode($1, $2, msg) + } else { + $$ = nil + } } | aggName '<' aggFields '>' { - s := &aggregateLiteralNode{elements: $3} - s.setRange($2, $4) - a := &aggregateEntryNode{name: $1, val: s} - a.setRange($1, $4) - $$ = []*aggregateEntryNode{a} + if $1 != nil { + fields, delims := $3.toNodes() + msg := ast.NewMessageLiteralNode($2, fields, delims, $4) + $$ = ast.NewMessageFieldNode($1, nil, msg) + } else { + $$ = nil + } } | aggName ':' '<' error '>' { + $$ = nil } | aggName '<' error '>' { + $$ = nil } aggName : name { - n := &compoundIdentNode{val: $1.val} - n.setRange($1, $1) - $$ = &aggregateNameNode{name: n} - $$.setRange($1, $1) + $$ = ast.NewFieldReferenceNode($1) } | '[' typeIdent ']' { - $$ = &aggregateNameNode{name: $2, isExtension: true} - $$.setRange($1, $3) + $$ = ast.NewExtensionFieldReferenceNode($1, $2, $3) } | '[' error ']' { + $$ = nil } 
constantList : constant { - $$ = []valueNode{$1} - } - | constantList ',' constant { - $$ = append($1, $3) + $$ = &valueList{$1, nil, nil} } - | constantList ';' constant { - $$ = append($1, $3) + | constant ',' constantList { + $$ = &valueList{$1, $2, $3} } | '<' aggFields '>' { - s := &aggregateLiteralNode{elements: $2} - s.setRange($1, $3) - $$ = []valueNode{s} + fields, delims := $2.toNodes() + msg := ast.NewMessageLiteralNode($1, fields, delims, $3) + $$ = &valueList{msg, nil, nil} } - | constantList ',' '<' aggFields '>' { - s := &aggregateLiteralNode{elements: $4} - s.setRange($3, $5) - $$ = append($1, s) - } - | constantList ';' '<' aggFields '>' { - s := &aggregateLiteralNode{elements: $4} - s.setRange($3, $5) - $$ = append($1, s) + | '<' aggFields '>' ',' constantList { + fields, delims := $2.toNodes() + msg := ast.NewMessageLiteralNode($1, fields, delims, $3) + $$ = &valueList{msg, $4, $5} } | '<' error '>' { + $$ = nil } - | constantList ',' '<' error '>' { + | '<' error '>' ',' constantList { + $$ = $5 } - | constantList ';' '<' error '>' { + +typeIdent : ident { + $$ = $1.toIdentValueNode(nil) + } + | '.' ident { + $$ = $2.toIdentValueNode($1) } -typeIdent : ident - | '.' ident { - $$ = &compoundIdentNode{val: "." + $2.val} - $$.setRange($1, $2) - } +msgElementTypeIdent : msgElementIdent { + $$ = $1.toIdentValueNode(nil) + } + | '.' ident { + $$ = $2.toIdentValueNode($1) + } + +extElementTypeIdent : extElementIdent { + $$ = $1.toIdentValueNode(nil) + } + | '.' ident { + $$ = $2.toIdentValueNode($1) + } -field : _REQUIRED typeIdent name '=' _INT_LIT ';' { - lbl := fieldLabel{identNode: $1, required: true} - $$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5} - $$.setRange($1, $6) +oneofElementTypeIdent : oneofElementIdent { + $$ = $1.toIdentValueNode(nil) + } + | '.' 
ident { + $$ = $2.toIdentValueNode($1) + } + +msgField : _REQUIRED typeIdent name '=' _INT_LIT ';' { + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, nil, $6) } | _OPTIONAL typeIdent name '=' _INT_LIT ';' { - lbl := fieldLabel{identNode: $1} - $$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5} - $$.setRange($1, $6) + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, nil, $6) } | _REPEATED typeIdent name '=' _INT_LIT ';' { - lbl := fieldLabel{identNode: $1, repeated: true} - $$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5} - $$.setRange($1, $6) + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, nil, $6) } - | typeIdent name '=' _INT_LIT ';' { - $$ = &fieldNode{fldType: $1, name: $2, tag: $4} - $$.setRange($1, $5) + | _REQUIRED typeIdent name '=' _INT_LIT compactOptions ';' { + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, $6, $7) + } + | _OPTIONAL typeIdent name '=' _INT_LIT compactOptions ';' { + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, $6, $7) + } + | _REPEATED typeIdent name '=' _INT_LIT compactOptions ';' { + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, $6, $7) + } + | msgElementTypeIdent name '=' _INT_LIT ';' { + $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, nil, $5) + } + | msgElementTypeIdent name '=' _INT_LIT compactOptions ';' { + $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, $5, $6) + } + +extField : _REQUIRED typeIdent name '=' _INT_LIT ';' { + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, nil, $6) + } + | _OPTIONAL typeIdent name '=' _INT_LIT ';' { + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, nil, $6) + } + | _REPEATED typeIdent name '=' _INT_LIT ';' { + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, nil, $6) } | _REQUIRED typeIdent name '=' _INT_LIT compactOptions ';' { - lbl := fieldLabel{identNode: $1, required: true} - $$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5, options: $6} - $$.setRange($1, $7) + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, $6, $7) } | _OPTIONAL typeIdent name '=' _INT_LIT compactOptions ';' { - lbl := fieldLabel{identNode: $1} - $$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5, options: $6} - $$.setRange($1, $7) + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, $6, $7) } | _REPEATED typeIdent name '=' _INT_LIT compactOptions ';' { - lbl := fieldLabel{identNode: $1, repeated: true} - $$ = &fieldNode{label: lbl, fldType: $2, name: $3, tag: $5, options: $6} - $$.setRange($1, $7) + $$ = ast.NewFieldNode($1.ToKeyword(), $2, $3, $4, $5, $6, $7) + } + | extElementTypeIdent name '=' _INT_LIT ';' { + $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, nil, $5) } - | typeIdent name '=' _INT_LIT compactOptions ';' { - $$ = &fieldNode{fldType: $1, name: $2, tag: $4, options: $5} - $$.setRange($1, $6) + | extElementTypeIdent name '=' _INT_LIT compactOptions ';' { + $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, $5, $6) } compactOptions: '[' compactOptionDecls ']' { - $$ = &compactOptionsNode{decls: $2} - $$.setRange($1, $3) - } + opts, commas := $2.toNodes() + $$ = ast.NewCompactOptionsNode($1, opts, commas, $3) + } -compactOptionDecls : compactOptionDecls ',' compactOption { - $$ = append($1, $3...) 
+compactOptionDecls : compactOption { + $$ = &compactOptionList{$1, nil, nil} + } + | compactOption ',' compactOptionDecls { + $$ = &compactOptionList{$1, $2, $3} } - | compactOption compactOption: optionName '=' constant { - n := &optionNameNode{parts: $1} - n.setRange($1[0], $1[len($1)-1]) - o := &optionNode{name: n, val: $3} - o.setRange($1[0], $3) - $$ = []*optionNode{o} + refs, dots := $1.toNodes() + optName := ast.NewOptionNameNode(refs, dots) + $$ = ast.NewCompactOptionNode(optName, $2, $3) } -group : _REQUIRED _GROUP name '=' _INT_LIT '{' messageBody '}' { - lbl := fieldLabel{identNode: $1, required: true} - $$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, decls: $7} - $$.setRange($1, $8) +group : _REQUIRED _GROUP name '=' _INT_LIT '{' messageDecls '}' { + $$ = ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, nil, $6, $7, $8) } - | _OPTIONAL _GROUP name '=' _INT_LIT '{' messageBody '}' { - lbl := fieldLabel{identNode: $1} - $$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, decls: $7} - $$.setRange($1, $8) + | _OPTIONAL _GROUP name '=' _INT_LIT '{' messageDecls '}' { + $$ = ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, nil, $6, $7, $8) } - | _REPEATED _GROUP name '=' _INT_LIT '{' messageBody '}' { - lbl := fieldLabel{identNode: $1, repeated: true} - $$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, decls: $7} - $$.setRange($1, $8) + | _REPEATED _GROUP name '=' _INT_LIT '{' messageDecls '}' { + $$ = ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, nil, $6, $7, $8) } - | _REQUIRED _GROUP name '=' _INT_LIT compactOptions '{' messageBody '}' { - lbl := fieldLabel{identNode: $1, required: true} - $$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, options: $6, decls: $8} - $$.setRange($1, $9) + | _REQUIRED _GROUP name '=' _INT_LIT compactOptions '{' messageDecls '}' { + $$ = ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, $6, $7, $8, $9) } - | _OPTIONAL _GROUP name '=' _INT_LIT compactOptions '{' messageBody '}' { - lbl := fieldLabel{identNode: $1} - $$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, options: $6, decls: $8} - $$.setRange($1, $9) + | _OPTIONAL _GROUP name '=' _INT_LIT compactOptions '{' messageDecls '}' { + $$ = ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, $6, $7, $8, $9) } - | _REPEATED _GROUP name '=' _INT_LIT compactOptions '{' messageBody '}' { - lbl := fieldLabel{identNode: $1, repeated: true} - $$ = &groupNode{groupKeyword: $2, label: lbl, name: $3, tag: $5, options: $6, decls: $8} - $$.setRange($1, $9) + | _REPEATED _GROUP name '=' _INT_LIT compactOptions '{' messageDecls '}' { + $$ = ast.NewGroupNode($1.ToKeyword(), $2.ToKeyword(), $3, $4, $5, $6, $7, $8, $9) } -oneof : _ONEOF name '{' oneofBody '}' { - $$ = &oneOfNode{name: $2, decls: $4} - $$.setRange($1, $5) +oneof : _ONEOF name '{' ooDecls '}' { + $$ = ast.NewOneOfNode($1.ToKeyword(), $2, $3, $4, $5) } -oneofBody : oneofBody oneofItem { - $$ = append($1, $2...) 
+ooDecls : ooDecls ooDecl { + if $2 != nil { + $$ = append($1, $2) + } else { + $$ = $1 + } + } + | ooDecl { + if $1 != nil { + $$ = []ast.OneOfElement{$1} + } else { + $$ = nil + } } - | oneofItem | { $$ = nil } -oneofItem : option { - $$ = []*oneOfElement{{option: $1[0]}} +ooDecl : option { + $$ = $1 } | oneofField { - $$ = []*oneOfElement{{field: $1}} + $$ = $1 } | oneofGroup { - $$ = []*oneOfElement{{group: $1}} + $$ = $1 } | ';' { - $$ = []*oneOfElement{{empty: $1}} + $$ = ast.NewEmptyDeclNode($1) } | error ';' { + $$ = nil } | error { + $$ = nil } -oneofField : typeIdent name '=' _INT_LIT ';' { - $$ = &fieldNode{fldType: $1, name: $2, tag: $4} - $$.setRange($1, $5) +oneofField : oneofElementTypeIdent name '=' _INT_LIT ';' { + $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, nil, $5) } - | typeIdent name '=' _INT_LIT compactOptions ';' { - $$ = &fieldNode{fldType: $1, name: $2, tag: $4, options: $5} - $$.setRange($1, $6) + | oneofElementTypeIdent name '=' _INT_LIT compactOptions ';' { + $$ = ast.NewFieldNode(nil, $1, $2, $3, $4, $5, $6) } -oneofGroup : _GROUP name '=' _INT_LIT '{' messageBody '}' { - $$ = &groupNode{groupKeyword: $1, name: $2, tag: $4, decls: $6} - $$.setRange($1, $7) +oneofGroup : _GROUP name '=' _INT_LIT '{' messageDecls '}' { + $$ = ast.NewGroupNode(nil, $1.ToKeyword(), $2, $3, $4, nil, $5, $6, $7) } - | _GROUP name '=' _INT_LIT compactOptions '{' messageBody '}' { - $$ = &groupNode{groupKeyword: $1, name: $2, tag: $4, options: $5, decls: $7} - $$.setRange($1, $8) + | _GROUP name '=' _INT_LIT compactOptions '{' messageDecls '}' { + $$ = ast.NewGroupNode(nil, $1.ToKeyword(), $2, $3, $4, $5, $6, $7, $8) } mapField : mapType name '=' _INT_LIT ';' { - $$ = &mapFieldNode{mapType: $1, name: $2, tag: $4} - $$.setRange($1, $5) + $$ = ast.NewMapFieldNode($1, $2, $3, $4, nil, $5) } | mapType name '=' _INT_LIT compactOptions ';' { - $$ = &mapFieldNode{mapType: $1, name: $2, tag: $4, options: $5} - $$.setRange($1, $6) + $$ = ast.NewMapFieldNode($1, $2, $3, $4, $5, $6) } mapType : _MAP '<' keyType ',' typeIdent '>' { - $$ = &mapTypeNode{mapKeyword: $1, keyType: $3, valueType: $5} - $$.setRange($1, $6) -} + $$ = ast.NewMapTypeNode($1.ToKeyword(), $2, $3, $4, $5, $6) + } keyType : _INT32 | _INT64 @@ -601,220 +675,245 @@ keyType : _INT32 | _STRING extensions : _EXTENSIONS tagRanges ';' { - $$ = &extensionRangeNode{ranges: $2} - $$.setRange($1, $3) + ranges, commas := $2.toNodes() + $$ = ast.NewExtensionRangeNode($1.ToKeyword(), ranges, commas, nil, $3) } | _EXTENSIONS tagRanges compactOptions ';' { - $$ = &extensionRangeNode{ranges: $2, options: $3} - $$.setRange($1, $4) + ranges, commas := $2.toNodes() + $$ = ast.NewExtensionRangeNode($1.ToKeyword(), ranges, commas, $3, $4) } -tagRanges : tagRanges ',' tagRange { - $$ = append($1, $3...) +tagRanges : tagRange { + $$ = &rangeList{$1, nil, nil} + } + | tagRange ',' tagRanges { + $$ = &rangeList{$1, $2, $3} } - | tagRange tagRange : _INT_LIT { - r := &rangeNode{startNode: $1} - r.setRange($1, $1) - $$ = []*rangeNode{r} + $$ = ast.NewRangeNode($1, nil, nil, nil) } | _INT_LIT _TO _INT_LIT { - r := &rangeNode{startNode: $1, endNode: $3} - r.setRange($1, $3) - $$ = []*rangeNode{r} + $$ = ast.NewRangeNode($1, $2.ToKeyword(), $3, nil) } | _INT_LIT _TO _MAX { - r := &rangeNode{startNode: $1, endNode: $3, endMax: true} - r.setRange($1, $3) - $$ = []*rangeNode{r} + $$ = ast.NewRangeNode($1, $2.ToKeyword(), nil, $3.ToKeyword()) } -enumRanges : enumRanges ',' enumRange { - $$ = append($1, $3...) 
+enumRanges : enumRange { + $$ = &rangeList{$1, nil, nil} + } + | enumRange ',' enumRanges { + $$ = &rangeList{$1, $2, $3} } - | enumRange enumRange : intLit { - r := &rangeNode{startNode: $1} - r.setRange($1, $1) - $$ = []*rangeNode{r} + $$ = ast.NewRangeNode($1, nil, nil, nil) } | intLit _TO intLit { - r := &rangeNode{startNode: $1, endNode: $3} - r.setRange($1, $3) - $$ = []*rangeNode{r} + $$ = ast.NewRangeNode($1, $2.ToKeyword(), $3, nil) } | intLit _TO _MAX { - r := &rangeNode{startNode: $1, endNode: $3, endMax: true} - r.setRange($1, $3) - $$ = []*rangeNode{r} + $$ = ast.NewRangeNode($1, $2.ToKeyword(), nil, $3.ToKeyword()) } intLit : _INT_LIT { - i := &compoundIntNode{val: int64($1.val)} - i.setRange($1, $1) - $$ = i + $$ = $1 } | '-' _INT_LIT { - if $2.val > math.MaxInt64 + 1 { - lexError(protolex, $2.start(), fmt.Sprintf("numeric constant %d would underflow 64-bit signed int (allowed range is %d to %d)", $2.val, int64(math.MinInt64), int64(math.MaxInt64))) - } - i := &compoundIntNode{val: -int64($2.val)} - i.setRange($1, $2) - $$ = i + $$ = ast.NewNegativeIntLiteralNode($1, $2) } msgReserved : _RESERVED tagRanges ';' { - $$ = &reservedNode{ranges: $2} - $$.setRange($1, $3) + ranges, commas := $2.toNodes() + $$ = ast.NewReservedRangesNode($1.ToKeyword(), ranges, commas, $3) } | reservedNames enumReserved : _RESERVED enumRanges ';' { - $$ = &reservedNode{ranges: $2} - $$.setRange($1, $3) + ranges, commas := $2.toNodes() + $$ = ast.NewReservedRangesNode($1.ToKeyword(), ranges, commas, $3) } | reservedNames reservedNames : _RESERVED fieldNames ';' { - $$ = &reservedNode{names: $2} - $$.setRange($1, $3) + names, commas := $2.toNodes() + $$ = ast.NewReservedNamesNode($1.ToKeyword(), names, commas, $3) } -fieldNames : fieldNames ',' stringLit { - $$ = append($1, $3) +fieldNames : stringLit { + $$ = &nameList{$1.toStringValueNode(), nil, nil} } - | stringLit { - $$ = []*compoundStringNode{$1} + | stringLit ',' fieldNames { + $$ = &nameList{$1.toStringValueNode(), $2, $3} } -enum : _ENUM name '{' enumBody '}' { - $$ = &enumNode{name: $2, decls: $4} - $$.setRange($1, $5) +enum : _ENUM name '{' enumDecls '}' { + $$ = ast.NewEnumNode($1.ToKeyword(), $2, $3, $4, $5) } -enumBody : enumBody enumItem { - $$ = append($1, $2...) +enumDecls : enumDecls enumDecl { + if $2 != nil { + $$ = append($1, $2) + } else { + $$ = $1 + } + } + | enumDecl { + if $1 != nil { + $$ = []ast.EnumElement{$1} + } else { + $$ = nil + } } - | enumItem | { $$ = nil } -enumItem : option { - $$ = []*enumElement{{option: $1[0]}} +enumDecl : option { + $$ = $1 } - | enumField { - $$ = []*enumElement{{value: $1}} + | enumValue { + $$ = $1 } | enumReserved { - $$ = []*enumElement{{reserved: $1}} + $$ = $1 } | ';' { - $$ = []*enumElement{{empty: $1}} + $$ = ast.NewEmptyDeclNode($1) } | error ';' { + $$ = nil } | error { + $$ = nil } -enumField : name '=' intLit ';' { - $$ = &enumValueNode{name: $1, number: $3} - $$.setRange($1, $4) +enumValue : enumElementName '=' intLit ';' { + $$ = ast.NewEnumValueNode($1, $2, $3, nil, $4) } - | name '=' intLit compactOptions ';' { - $$ = &enumValueNode{name: $1, number: $3, options: $4} - $$.setRange($1, $5) + | enumElementName '=' intLit compactOptions ';' { + $$ = ast.NewEnumValueNode($1, $2, $3, $4, $5) } -message : _MESSAGE name '{' messageBody '}' { - $$ = &messageNode{name: $2, decls: $4} - $$.setRange($1, $5) +message : _MESSAGE name '{' messageDecls '}' { + $$ = ast.NewMessageNode($1.ToKeyword(), $2, $3, $4, $5) } -messageBody : messageBody messageItem { - $$ = append($1, $2...) 
+messageDecls : messageDecls messageDecl { + if $2 != nil { + $$ = append($1, $2) + } else { + $$ = $1 + } + } + | messageDecl { + if $1 != nil { + $$ = []ast.MessageElement{$1} + } else { + $$ = nil + } } - | messageItem | { $$ = nil } -messageItem : field { - $$ = []*messageElement{{field: $1}} +messageDecl : msgField { + $$ = $1 } | enum { - $$ = []*messageElement{{enum: $1}} + $$ = $1 } | message { - $$ = []*messageElement{{nested: $1}} + $$ = $1 } | extend { - $$ = []*messageElement{{extend: $1}} + $$ = $1 } | extensions { - $$ = []*messageElement{{extensionRange: $1}} + $$ = $1 } | group { - $$ = []*messageElement{{group: $1}} + $$ = $1 } | option { - $$ = []*messageElement{{option: $1[0]}} + $$ = $1 } | oneof { - $$ = []*messageElement{{oneOf: $1}} + $$ = $1 } | mapField { - $$ = []*messageElement{{mapField: $1}} + $$ = $1 } | msgReserved { - $$ = []*messageElement{{reserved: $1}} + $$ = $1 } | ';' { - $$ = []*messageElement{{empty: $1}} + $$ = ast.NewEmptyDeclNode($1) } | error ';' { + $$ = nil } | error { + $$ = nil } -extend : _EXTEND typeIdent '{' extendBody '}' { - $$ = &extendNode{extendee: $2, decls: $4} - $$.setRange($1, $5) +extend : _EXTEND typeIdent '{' extendDecls '}' { + $$ = ast.NewExtendNode($1.ToKeyword(), $2, $3, $4, $5) } -extendBody : extendBody extendItem { - $$ = append($1, $2...) +extendDecls : extendDecls extendDecl { + if $2 != nil { + $$ = append($1, $2) + } else { + $$ = $1 + } + } + | extendDecl { + if $1 != nil { + $$ = []ast.ExtendElement{$1} + } else { + $$ = nil + } } - | extendItem | { $$ = nil } -extendItem : field { - $$ = []*extendElement{{field: $1}} +extendDecl : extField { + $$ = $1 } | group { - $$ = []*extendElement{{group: $1}} + $$ = $1 } | ';' { - $$ = []*extendElement{{empty: $1}} + $$ = ast.NewEmptyDeclNode($1) } | error ';' { + $$ = nil } | error { + $$ = nil } -service : _SERVICE name '{' serviceBody '}' { - $$ = &serviceNode{name: $2, decls: $4} - $$.setRange($1, $5) +service : _SERVICE name '{' serviceDecls '}' { + $$ = ast.NewServiceNode($1.ToKeyword(), $2, $3, $4, $5) } -serviceBody : serviceBody serviceItem { - $$ = append($1, $2...) 
+serviceDecls : serviceDecls serviceDecl { + if $2 != nil { + $$ = append($1, $2) + } else { + $$ = $1 + } + } + | serviceDecl { + if $1 != nil { + $$ = []ast.ServiceElement{$1} + } else { + $$ = nil + } } - | serviceItem | { $$ = nil } @@ -822,57 +921,229 @@ serviceBody : serviceBody serviceItem { // NB: doc suggests support for "stream" declaration, separate from "rpc", but // it does not appear to be supported in protoc (doc is likely from grammar for // Google-internal version of protoc, with support for streaming stubby) -serviceItem : option { - $$ = []*serviceElement{{option: $1[0]}} +serviceDecl : option { + $$ = $1 } | rpc { - $$ = []*serviceElement{{rpc: $1}} + $$ = $1 } | ';' { - $$ = []*serviceElement{{empty: $1}} + $$ = ast.NewEmptyDeclNode($1) } | error ';' { + $$ = nil } | error { + $$ = nil } -rpc : _RPC name '(' rpcType ')' _RETURNS '(' rpcType ')' ';' { - $$ = &methodNode{name: $2, input: $4, output: $8} - $$.setRange($1, $10) +rpc : _RPC name rpcType _RETURNS rpcType ';' { + $$ = ast.NewRPCNode($1.ToKeyword(), $2, $3, $4.ToKeyword(), $5, $6) } - | _RPC name '(' rpcType ')' _RETURNS '(' rpcType ')' '{' rpcOptions '}' { - $$ = &methodNode{name: $2, input: $4, output: $8, options: $11} - $$.setRange($1, $12) + | _RPC name rpcType _RETURNS rpcType '{' rpcDecls '}' { + $$ = ast.NewRPCNodeWithBody($1.ToKeyword(), $2, $3, $4.ToKeyword(), $5, $6, $7, $8) } -rpcType : _STREAM typeIdent { - $$ = &rpcTypeNode{msgType: $2, streamKeyword: $1} - $$.setRange($1, $2) +rpcType : '(' _STREAM typeIdent ')' { + $$ = ast.NewRPCTypeNode($1, $2.ToKeyword(), $3, $4) } - | typeIdent { - $$ = &rpcTypeNode{msgType: $1} - $$.setRange($1, $1) + | '(' typeIdent ')' { + $$ = ast.NewRPCTypeNode($1, nil, $2, $3) } -rpcOptions : rpcOptions rpcOption { - $$ = append($1, $2...) 
+rpcDecls : rpcDecls rpcDecl { + if $2 != nil { + $$ = append($1, $2) + } else { + $$ = $1 + } + } + | rpcDecl { + if $1 != nil { + $$ = []ast.RPCElement{$1} + } else { + $$ = nil + } } - | rpcOption | { - $$ = []*optionNode{} + $$ = nil } -rpcOption : option { +rpcDecl : option { $$ = $1 } | ';' { - $$ = []*optionNode{} + $$ = ast.NewEmptyDeclNode($1) } | error ';' { + $$ = nil } | error { + $$ = nil } +// excludes message, enum, oneof, extensions, reserved, extend, +// option, optional, required, and repeated +msgElementName : _NAME + | _SYNTAX + | _IMPORT + | _WEAK + | _PUBLIC + | _PACKAGE + | _TRUE + | _FALSE + | _INF + | _NAN + | _DOUBLE + | _FLOAT + | _INT32 + | _INT64 + | _UINT32 + | _UINT64 + | _SINT32 + | _SINT64 + | _FIXED32 + | _FIXED64 + | _SFIXED32 + | _SFIXED64 + | _BOOL + | _STRING + | _BYTES + | _GROUP + | _MAP + | _TO + | _MAX + | _SERVICE + | _RPC + | _STREAM + | _RETURNS + +// excludes optional, required, and repeated +extElementName : _NAME + | _SYNTAX + | _IMPORT + | _WEAK + | _PUBLIC + | _PACKAGE + | _OPTION + | _TRUE + | _FALSE + | _INF + | _NAN + | _DOUBLE + | _FLOAT + | _INT32 + | _INT64 + | _UINT32 + | _UINT64 + | _SINT32 + | _SINT64 + | _FIXED32 + | _FIXED64 + | _SFIXED32 + | _SFIXED64 + | _BOOL + | _STRING + | _BYTES + | _GROUP + | _ONEOF + | _MAP + | _EXTENSIONS + | _TO + | _MAX + | _RESERVED + | _ENUM + | _MESSAGE + | _EXTEND + | _SERVICE + | _RPC + | _STREAM + | _RETURNS + +// excludes reserved, option +enumElementName : _NAME + | _SYNTAX + | _IMPORT + | _WEAK + | _PUBLIC + | _PACKAGE + | _TRUE + | _FALSE + | _INF + | _NAN + | _REPEATED + | _OPTIONAL + | _REQUIRED + | _DOUBLE + | _FLOAT + | _INT32 + | _INT64 + | _UINT32 + | _UINT64 + | _SINT32 + | _SINT64 + | _FIXED32 + | _FIXED64 + | _SFIXED32 + | _SFIXED64 + | _BOOL + | _STRING + | _BYTES + | _GROUP + | _ONEOF + | _MAP + | _EXTENSIONS + | _TO + | _MAX + | _ENUM + | _MESSAGE + | _EXTEND + | _SERVICE + | _RPC + | _STREAM + | _RETURNS + +// excludes option, optional, required, and repeated +oneofElementName : _NAME + | _SYNTAX + | _IMPORT + | _WEAK + | _PUBLIC + | _PACKAGE + | _TRUE + | _FALSE + | _INF + | _NAN + | _DOUBLE + | _FLOAT + | _INT32 + | _INT64 + | _UINT32 + | _UINT64 + | _SINT32 + | _SINT64 + | _FIXED32 + | _FIXED64 + | _SFIXED32 + | _SFIXED64 + | _BOOL + | _STRING + | _BYTES + | _GROUP + | _ONEOF + | _MAP + | _EXTENSIONS + | _TO + | _MAX + | _RESERVED + | _ENUM + | _MESSAGE + | _EXTEND + | _SERVICE + | _RPC + | _STREAM + | _RETURNS + name : _NAME | _SYNTAX | _IMPORT diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go index 1c91002a54f..61d9038651e 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/proto.y.go @@ -10,54 +10,68 @@ import __yyfmt__ "fmt" //lint:file-ignore SA4006 generated parser has unused values import ( - "fmt" "math" + + "github.com/jhump/protoreflect/desc/protoparse/ast" ) -//line proto.y:15 +//line proto.y:16 type protoSymType struct { yys int - file *fileNode - fileDecls []*fileElement - syn *syntaxNode - pkg *packageNode - imprt *importNode - msg *messageNode - msgDecls []*messageElement - fld *fieldNode - mapFld *mapFieldNode - mapType *mapTypeNode - grp *groupNode - oo *oneOfNode - ooDecls []*oneOfElement - ext *extensionRangeNode - resvd *reservedNode - en *enumNode - enDecls []*enumElement - env *enumValueNode - extend *extendNode - extDecls []*extendElement - svc *serviceNode - svcDecls 
[]*serviceElement - mtd *methodNode - rpcType *rpcTypeNode - opts []*optionNode - optNm []*optionNamePartNode - cmpctOpts *compactOptionsNode - rngs []*rangeNode - names []*compoundStringNode - cid *compoundIdentNode - sl []valueNode - agg []*aggregateEntryNode - aggName *aggregateNameNode - v valueNode - il *compoundIntNode - str *compoundStringNode - s *stringLiteralNode - i *intLiteralNode - f *floatLiteralNode - id *identNode - b *basicNode + file *ast.FileNode + syn *ast.SyntaxNode + fileDecl ast.FileElement + fileDecls []ast.FileElement + pkg *ast.PackageNode + imprt *ast.ImportNode + msg *ast.MessageNode + msgDecl ast.MessageElement + msgDecls []ast.MessageElement + fld *ast.FieldNode + mapFld *ast.MapFieldNode + mapType *ast.MapTypeNode + grp *ast.GroupNode + oo *ast.OneOfNode + ooDecl ast.OneOfElement + ooDecls []ast.OneOfElement + ext *ast.ExtensionRangeNode + resvd *ast.ReservedNode + en *ast.EnumNode + enDecl ast.EnumElement + enDecls []ast.EnumElement + env *ast.EnumValueNode + extend *ast.ExtendNode + extDecl ast.ExtendElement + extDecls []ast.ExtendElement + svc *ast.ServiceNode + svcDecl ast.ServiceElement + svcDecls []ast.ServiceElement + mtd *ast.RPCNode + rpcType *ast.RPCTypeNode + rpcDecl ast.RPCElement + rpcDecls []ast.RPCElement + opt *ast.OptionNode + opts *compactOptionList + ref *ast.FieldReferenceNode + optNms *fieldRefList + cmpctOpts *ast.CompactOptionsNode + rng *ast.RangeNode + rngs *rangeList + names *nameList + cid *identList + tid ast.IdentValueNode + sl *valueList + msgField *ast.MessageFieldNode + msgEntry *messageFieldEntry + msgLit *messageFieldList + v ast.ValueNode + il ast.IntValueNode + str *stringList + s *ast.StringLiteralNode + i *ast.UintLiteralNode + f *ast.FloatLiteralNode + id *ast.IdentNode + b *ast.RuneNode err error } @@ -189,13 +203,14 @@ var protoToknames = [...]string{ "'~'", "'`'", } + var protoStatenames = [...]string{} const protoEofCode = 1 const protoErrCode = 2 const protoInitialStackSize = 16 -//line proto.y:920 +//line proto.y:1191 //line yacctab:1 var protoExca = [...]int{ @@ -215,302 +230,252 @@ var protoExca = [...]int{ 1, 3, -2, 0, -1, 95, - 55, 161, + 55, 178, -2, 0, -1, 96, - 55, 149, + 55, 166, -2, 0, -1, 97, - 55, 178, + 55, 195, -2, 0, -1, 99, - 55, 187, + 55, 204, -2, 0, -1, 110, - 55, 47, + 55, 53, -2, 0, - -1, 225, - 61, 47, + -1, 289, + 55, 51, + 61, 51, -2, 0, - -1, 240, - 55, 99, + -1, 352, + 61, 53, -2, 0, - -1, 267, - 61, 47, + -1, 367, + 55, 116, -2, 0, - -1, 310, - 61, 47, + -1, 401, + 61, 53, -2, 0, - -1, 347, - 55, 161, + -1, 489, + 61, 53, -2, 0, - -1, 351, - 55, 161, - -2, 0, - -1, 355, - 55, 161, + -1, 533, + 55, 178, -2, 0, - -1, 366, - 61, 47, + -1, 537, + 55, 178, -2, 0, - -1, 368, - 61, 47, + -1, 541, + 55, 178, -2, 0, - -1, 373, - 55, 161, + -1, 559, + 55, 216, -2, 0, - -1, 376, - 55, 161, + -1, 566, + 55, 178, -2, 0, - -1, 379, - 55, 161, + -1, 569, + 55, 178, -2, 0, - -1, 395, - 55, 161, + -1, 572, + 55, 178, -2, 0, - -1, 407, - 55, 161, + -1, 593, + 55, 178, -2, 0, - -1, 412, - 55, 199, + -1, 605, + 55, 178, -2, 0, } const protoPrivate = 57344 -const protoLast = 2738 +const protoLast = 2321 var protoAct = [...]int{ - - 31, 125, 8, 415, 8, 8, 118, 303, 169, 104, - 287, 236, 75, 282, 117, 207, 193, 134, 77, 79, - 80, 106, 84, 206, 8, 105, 107, 124, 119, 156, - 162, 143, 146, 192, 395, 76, 82, 324, 81, 170, - 339, 340, 26, 393, 235, 323, 353, 238, 338, 355, - 351, 85, 237, 30, 88, 89, 337, 349, 238, 238, - 347, 238, 238, 238, 345, 331, 321, 298, 271, 270, - 408, 208, 238, 238, 335, 116, 382, 297, 
200, 238, - 238, 238, 238, 29, 208, 208, 263, 363, 91, 407, - 223, 110, 103, 109, 77, 222, 94, 152, 147, 225, - 222, 163, 421, 221, 401, 222, 379, 302, 221, 399, - 222, 174, 405, 221, 370, 90, 376, 377, 221, 344, - 98, 373, 91, 222, 183, 158, 157, 240, 154, 115, - 209, 221, 314, 87, 87, 191, 80, 79, 99, 97, - 77, 195, 196, 209, 209, 260, 152, 147, 243, 185, - 187, 189, 242, 259, 222, 81, 244, 220, 325, 92, - 237, 166, 221, 163, 96, 219, 93, 93, 216, 199, - 174, 411, 17, 412, 95, 87, 197, 154, 202, 87, - 199, 102, 101, 158, 157, 211, 228, 229, 230, 231, - 232, 233, 214, 226, 14, 224, 166, 418, 374, 371, - 4, 15, 418, 362, 16, 17, 167, 17, 17, 218, - 357, 165, 334, 17, 213, 328, 279, 215, 212, 203, - 258, 184, 168, 100, 109, 25, 174, 86, 361, 360, - 277, 276, 275, 274, 268, 19, 18, 20, 21, 77, - 273, 167, 288, 284, 13, 266, 165, 417, 272, 264, - 419, 283, 417, 241, 280, 234, 204, 24, 293, 364, - 261, 239, 381, 5, 29, 194, 109, 23, 174, 174, - 299, 296, 380, 181, 179, 309, 311, 301, 320, 285, - 319, 305, 318, 300, 180, 29, 23, 317, 288, 316, - 322, 27, 28, 315, 329, 330, 295, 327, 122, 11, - 194, 11, 11, 14, 293, 178, 176, 332, 218, 278, - 15, 174, 174, 16, 17, 333, 177, 262, 87, 341, - 29, 11, 336, 164, 161, 77, 109, 346, 348, 350, - 352, 354, 356, 3, 12, 359, 22, 283, 358, 109, - 109, 155, 174, 148, 19, 18, 20, 21, 365, 367, - 145, 218, 123, 13, 205, 120, 10, 149, 10, 10, - 121, 9, 372, 9, 9, 128, 375, 174, 198, 174, - 378, 286, 141, 127, 290, 383, 126, 385, 10, 183, - 289, 218, 183, 9, 174, 183, 174, 171, 388, 307, - 397, 390, 394, 396, 392, 183, 173, 183, 245, 183, - 305, 108, 414, 281, 7, 6, 2, 1, 0, 0, - 406, 0, 0, 183, 416, 0, 416, 183, 420, 0, - 0, 0, 410, 218, 308, 218, 29, 114, 111, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 73, 74, 0, 0, 0, 0, 110, 0, 0, 0, - 0, 0, 0, 0, 310, 113, 112, 0, 0, 0, - 306, 29, 114, 111, 32, 33, 34, 35, 36, 37, + 31, 118, 117, 125, 8, 396, 8, 8, 488, 81, + 486, 414, 363, 288, 579, 421, 328, 107, 77, 79, + 80, 82, 84, 326, 317, 311, 8, 106, 105, 124, + 281, 228, 139, 26, 177, 593, 416, 591, 30, 525, + 555, 553, 85, 551, 541, 87, 88, 89, 364, 539, + 537, 535, 364, 531, 75, 364, 364, 364, 364, 533, + 524, 519, 512, 364, 364, 500, 364, 473, 364, 362, + 502, 405, 364, 560, 404, 329, 364, 364, 523, 397, + 364, 329, 364, 349, 364, 29, 329, 116, 94, 350, + 110, 348, 563, 109, 77, 320, 562, 584, 352, 349, + 178, 104, 550, 282, 529, 98, 349, 348, 530, 492, + 349, 293, 347, 103, 348, 493, 583, 477, 348, 302, + 346, 526, 503, 472, 388, 371, 365, 230, 511, 185, + 115, 339, 321, 93, 330, 310, 91, 314, 315, 344, + 330, 304, 306, 308, 14, 330, 285, 582, 605, 572, + 4, 15, 569, 566, 16, 17, 318, 17, 17, 558, + 367, 559, 14, 319, 316, 285, 582, 99, 97, 15, + 96, 95, 16, 17, 603, 597, 17, 17, 577, 576, + 178, 575, 570, 567, 564, 19, 18, 20, 21, 557, + 549, 286, 543, 515, 13, 507, 284, 581, 413, 340, + 595, 387, 318, 19, 18, 20, 21, 370, 369, 185, + 286, 323, 13, 342, 333, 284, 581, 324, 303, 287, + 102, 101, 100, 90, 86, 25, 547, 546, 504, 480, + 479, 478, 411, 410, 409, 408, 407, 337, 406, 394, + 368, 361, 325, 334, 335, 336, 92, 24, 482, 418, + 389, 366, 122, 11, 574, 11, 11, 230, 573, 332, + 338, 373, 374, 375, 376, 377, 378, 379, 380, 381, + 382, 383, 384, 120, 10, 11, 10, 10, 121, 9, + 518, 9, 9, 517, 282, 419, 516, 343, 29, 5, + 293, 300, 298, 23, 27, 28, 10, 29, 313, 499, + 498, 9, 299, 345, 353, 355, 356, 357, 358, 359, + 360, 341, 23, 
297, 295, 497, 496, 495, 494, 481, + 351, 470, 313, 412, 296, 390, 29, 3, 283, 280, + 22, 12, 227, 179, 176, 391, 392, 393, 123, 327, + 312, 180, 386, 385, 128, 420, 137, 127, 424, 126, + 229, 109, 119, 293, 423, 289, 290, 427, 236, 134, + 429, 395, 237, 140, 183, 77, 402, 431, 239, 143, + 372, 422, 108, 292, 76, 578, 415, 7, 400, 398, + 6, 2, 1, 0, 0, 0, 0, 0, 0, 318, + 0, 417, 0, 0, 0, 0, 471, 0, 0, 474, + 109, 0, 293, 0, 0, 0, 476, 484, 0, 0, + 0, 0, 475, 0, 0, 490, 0, 0, 0, 0, + 0, 0, 0, 0, 422, 501, 0, 0, 508, 509, + 0, 0, 0, 0, 426, 0, 506, 0, 0, 432, + 433, 434, 435, 436, 437, 17, 438, 439, 440, 441, + 0, 0, 510, 442, 443, 444, 445, 446, 447, 448, + 449, 450, 451, 452, 453, 454, 455, 456, 428, 457, + 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, + 468, 469, 514, 513, 425, 0, 0, 505, 521, 0, + 293, 430, 0, 522, 520, 0, 0, 0, 0, 0, + 0, 0, 0, 527, 77, 109, 0, 532, 534, 536, + 538, 540, 542, 545, 0, 544, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 109, 0, 552, + 554, 556, 0, 548, 0, 0, 565, 561, 0, 0, + 568, 0, 0, 0, 571, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 580, 0, 0, 0, 302, 0, 586, + 302, 0, 588, 302, 0, 590, 0, 0, 0, 0, + 0, 0, 580, 0, 109, 109, 592, 594, 302, 0, + 302, 0, 302, 596, 598, 599, 604, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 302, 0, 607, 302, + 487, 0, 29, 114, 111, 32, 33, 34, 35, 36, + 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, + 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, + 0, 0, 110, 0, 0, 0, 0, 0, 0, 0, + 489, 113, 112, 0, 0, 0, 485, 29, 114, 111, + 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, + 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, + 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, + 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, + 72, 73, 74, 0, 0, 0, 0, 110, 0, 0, + 0, 0, 0, 0, 0, 401, 113, 112, 0, 0, + 399, 29, 114, 111, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, 0, - 0, 110, 0, 0, 0, 0, 0, 0, 0, 267, - 113, 112, 0, 0, 265, 29, 114, 111, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, - 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, - 74, 0, 0, 0, 0, 110, 0, 0, 0, 0, - 0, 0, 0, 368, 113, 112, 29, 114, 111, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 73, 74, 0, 0, 0, 0, 110, 0, 0, 0, - 0, 0, 172, 0, 366, 113, 112, 32, 33, 34, - 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, - 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, - 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 400, 0, 172, 0, 0, 0, 175, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, - 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, - 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 398, 0, 172, 0, 0, 0, 175, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 73, 74, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 369, 0, 0, 0, 0, 0, 175, - 29, 114, 111, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, - 69, 70, 71, 72, 73, 74, 0, 0, 0, 0, - 110, 0, 0, 0, 0, 0, 172, 0, 
0, 113, - 112, 32, 33, 34, 35, 36, 37, 38, 39, 40, - 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, - 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, - 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, - 71, 72, 73, 74, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 343, 0, 172, 0, 0, - 0, 175, 32, 33, 34, 35, 36, 37, 38, 39, - 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 313, 0, 172, 0, - 0, 0, 175, 32, 33, 34, 35, 36, 37, 38, + 0, 110, 0, 0, 0, 0, 0, 0, 0, 489, + 113, 112, 29, 114, 111, 32, 33, 34, 35, 36, + 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, + 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, + 0, 0, 110, 0, 0, 0, 0, 0, 528, 0, + 0, 113, 112, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, 0, 0, - 0, 217, 0, 0, 0, 0, 0, 0, 0, 386, - 0, 0, 0, 175, 32, 33, 34, 35, 36, 37, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 491, + 0, 0, 0, 294, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 68, 69, 70, 71, 72, 73, 74, 246, 247, 248, - 249, 250, 251, 252, 253, 254, 255, 256, 257, 0, - 384, 0, 0, 0, 175, 32, 33, 34, 35, 36, + 68, 69, 70, 71, 72, 73, 74, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 403, 0, 0, 0, 294, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 342, 0, 0, 0, 175, 32, 33, 34, 35, + 0, 291, 0, 0, 0, 294, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 312, 0, 0, 0, 175, 32, 33, 34, + 0, 0, 0, 0, 0, 0, 294, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 269, 0, 0, 0, 175, 32, 33, - 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, - 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, - 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, - 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 172, 0, 0, 0, 175, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 73, 74, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 175, - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, - 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, - 72, 73, 74, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 130, 0, 0, 0, 78, 32, - 33, 34, 35, 36, 37, 139, 39, 40, 41, 42, - 133, 132, 131, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 140, - 144, 138, 65, 66, 142, 135, 136, 137, 71, 72, - 73, 74, 0, 0, 129, 0, 0, 413, 130, 0, - 0, 83, 0, 32, 33, 34, 35, 36, 37, 139, - 39, 40, 41, 42, 133, 132, 131, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, 61, 140, 144, 138, 65, 66, 142, 135, - 136, 137, 71, 72, 73, 74, 0, 0, 129, 0, - 0, 409, 130, 0, 0, 83, 0, 32, 33, 34, - 35, 36, 37, 139, 39, 40, 41, 42, 133, 132, - 131, 46, 47, 48, 49, 50, 
51, 52, 53, 54, - 55, 56, 57, 58, 59, 60, 61, 140, 144, 138, - 65, 66, 142, 135, 136, 137, 71, 72, 73, 74, - 0, 0, 129, 0, 0, 404, 130, 0, 0, 83, - 0, 32, 33, 34, 35, 36, 37, 139, 39, 40, - 41, 42, 133, 132, 131, 46, 47, 48, 49, 50, - 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, - 61, 140, 144, 138, 65, 66, 142, 135, 136, 137, - 71, 72, 73, 74, 0, 0, 129, 0, 0, 403, - 130, 0, 0, 83, 0, 32, 33, 34, 35, 36, - 37, 139, 39, 40, 41, 42, 133, 132, 131, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 140, 144, 138, 65, 66, - 142, 135, 136, 137, 71, 72, 73, 74, 0, 0, - 129, 0, 0, 402, 130, 0, 0, 83, 0, 32, - 33, 34, 35, 36, 37, 139, 39, 40, 41, 42, - 133, 132, 131, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 140, - 144, 138, 65, 66, 142, 135, 136, 137, 71, 72, - 73, 74, 0, 0, 129, 0, 0, 391, 130, 0, - 0, 83, 0, 32, 33, 34, 35, 36, 37, 139, - 39, 40, 41, 42, 133, 132, 131, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, 61, 140, 144, 138, 65, 66, 142, 135, - 136, 137, 71, 72, 73, 74, 0, 0, 129, 0, - 0, 389, 130, 0, 0, 83, 0, 32, 33, 34, - 35, 36, 37, 139, 39, 40, 41, 42, 133, 132, - 131, 46, 47, 48, 49, 50, 51, 52, 53, 54, - 55, 56, 57, 58, 59, 60, 61, 140, 144, 138, - 65, 66, 142, 135, 136, 137, 71, 72, 73, 74, - 0, 0, 129, 0, 0, 387, 292, 0, 0, 83, - 0, 32, 33, 34, 35, 36, 37, 139, 39, 40, - 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, - 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, - 294, 62, 63, 64, 65, 66, 67, 68, 69, 70, - 71, 72, 73, 74, 0, 0, 291, 0, 0, 326, - 160, 0, 0, 83, 0, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 133, 132, 131, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, - 159, 0, 0, 210, 130, 0, 0, 83, 0, 32, - 33, 34, 35, 36, 37, 139, 39, 40, 41, 42, - 133, 132, 131, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 140, - 144, 138, 65, 66, 142, 135, 136, 137, 71, 72, - 73, 74, 0, 0, 129, 0, 0, 182, 130, 0, - 0, 83, 0, 32, 33, 34, 35, 36, 37, 139, - 39, 40, 41, 42, 133, 132, 131, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, - 59, 60, 61, 140, 144, 138, 65, 66, 142, 135, - 136, 137, 71, 72, 73, 74, 0, 0, 129, 0, - 0, 292, 0, 0, 0, 83, 32, 33, 34, 35, - 36, 37, 139, 39, 40, 41, 42, 43, 44, 45, - 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 294, 62, 63, 64, 65, - 66, 67, 68, 69, 70, 71, 72, 73, 74, 0, - 0, 291, 0, 0, 160, 0, 0, 0, 83, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, - 133, 132, 131, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, - 73, 74, 0, 0, 159, 0, 0, 227, 0, 0, - 0, 83, 32, 33, 34, 35, 36, 37, 38, 39, + 0, 130, 0, 0, 0, 78, 144, 145, 146, 147, + 148, 149, 17, 150, 151, 152, 153, 133, 132, 131, + 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 136, 142, 135, 170, + 171, 138, 19, 18, 20, 172, 173, 174, 175, 0, + 0, 129, 0, 0, 608, 130, 0, 0, 141, 0, + 144, 145, 146, 147, 148, 149, 17, 150, 151, 152, + 153, 133, 132, 131, 154, 155, 156, 157, 158, 159, + 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, + 136, 142, 135, 170, 171, 138, 19, 18, 20, 172, + 173, 174, 175, 0, 0, 129, 0, 0, 606, 130, + 0, 0, 141, 0, 144, 145, 146, 147, 148, 149, + 17, 150, 151, 152, 153, 133, 132, 131, 154, 155, + 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, + 166, 167, 168, 169, 136, 142, 135, 170, 171, 138, + 19, 18, 20, 172, 173, 174, 175, 0, 0, 129, + 0, 0, 602, 130, 0, 0, 141, 0, 144, 145, + 146, 147, 148, 149, 17, 
150, 151, 152, 153, 133, + 132, 131, 154, 155, 156, 157, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, 136, 142, + 135, 170, 171, 138, 19, 18, 20, 172, 173, 174, + 175, 0, 0, 129, 0, 0, 601, 130, 0, 0, + 141, 0, 144, 145, 146, 147, 148, 149, 17, 150, + 151, 152, 153, 133, 132, 131, 154, 155, 156, 157, + 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 136, 142, 135, 170, 171, 138, 19, 18, + 20, 172, 173, 174, 175, 0, 0, 129, 0, 0, + 600, 130, 0, 0, 141, 0, 144, 145, 146, 147, + 148, 149, 17, 150, 151, 152, 153, 133, 132, 131, + 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 136, 142, 135, 170, + 171, 138, 19, 18, 20, 172, 173, 174, 175, 0, + 0, 129, 0, 0, 589, 130, 0, 0, 141, 0, + 144, 145, 146, 147, 148, 149, 17, 150, 151, 152, + 153, 133, 132, 131, 154, 155, 156, 157, 158, 159, + 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, + 136, 142, 135, 170, 171, 138, 19, 18, 20, 172, + 173, 174, 175, 0, 0, 129, 0, 0, 587, 130, + 0, 0, 141, 0, 144, 145, 146, 147, 148, 149, + 17, 150, 151, 152, 153, 133, 132, 131, 154, 155, + 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, + 166, 167, 168, 169, 136, 142, 135, 170, 171, 138, + 19, 18, 20, 172, 173, 174, 175, 0, 0, 129, + 0, 0, 585, 232, 0, 0, 141, 0, 240, 241, + 242, 243, 244, 245, 246, 247, 248, 249, 250, 235, + 234, 233, 251, 252, 253, 254, 255, 256, 257, 258, + 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, + 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, + 279, 0, 0, 231, 0, 0, 331, 130, 0, 0, + 238, 0, 144, 145, 146, 147, 148, 149, 17, 150, + 151, 152, 153, 133, 132, 131, 154, 155, 156, 157, + 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 136, 142, 135, 170, 171, 138, 19, 18, + 20, 172, 173, 174, 175, 0, 0, 129, 0, 0, + 301, 130, 0, 0, 141, 0, 144, 145, 146, 147, + 148, 149, 17, 150, 151, 152, 153, 133, 132, 131, + 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 136, 142, 135, 170, + 171, 138, 19, 18, 20, 172, 173, 174, 175, 0, + 0, 129, 0, 0, 232, 0, 0, 0, 141, 240, + 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, + 235, 234, 233, 251, 252, 253, 254, 255, 256, 257, + 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, + 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, + 278, 279, 0, 0, 231, 0, 0, 354, 0, 0, + 0, 238, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, @@ -519,250 +484,345 @@ var protoAct = [...]int{ 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 304, 74, 0, 0, + 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, - 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, + 64, 65, 66, 67, 68, 69, 70, 71, 72, 483, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, - 190, 62, 63, 64, 65, 66, 67, 68, 69, 70, + 309, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, - 58, 59, 60, 188, 62, 63, 64, 65, 66, 67, + 58, 59, 60, 307, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 83, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 
45, 46, 47, 48, 49, 50, 51, 52, 53, 54, - 55, 56, 57, 58, 59, 60, 186, 62, 63, 64, + 55, 56, 57, 58, 59, 60, 305, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, - 0, 0, 0, 0, 0, 151, 0, 0, 0, 83, - 32, 33, 34, 35, 36, 37, 139, 39, 40, 41, - 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 153, 68, 69, 70, 71, - 72, 73, 74, 0, 151, 150, 0, 0, 201, 32, - 33, 34, 35, 36, 37, 139, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 63, 64, 65, 66, 153, 68, 69, 70, 71, 72, - 73, 74, 0, 0, 150, 32, 33, 34, 35, 36, - 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, - 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 72, 73, 74, + 0, 0, 0, 0, 0, 426, 0, 0, 0, 83, + 432, 433, 434, 435, 436, 437, 17, 438, 439, 440, + 441, 0, 0, 0, 442, 443, 444, 445, 446, 447, + 448, 449, 450, 451, 452, 453, 454, 455, 456, 428, + 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, + 467, 468, 469, 0, 0, 425, 0, 0, 182, 0, + 0, 0, 430, 186, 187, 188, 189, 190, 191, 17, + 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, + 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, + 212, 213, 214, 215, 216, 217, 218, 219, 184, 220, + 221, 222, 223, 224, 225, 226, 0, 182, 181, 0, + 0, 322, 186, 187, 188, 189, 190, 191, 17, 192, + 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, + 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, + 213, 214, 215, 216, 217, 218, 219, 184, 220, 221, + 222, 223, 224, 225, 226, 0, 0, 181, 32, 33, + 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, + 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, + 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, + 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, + 74, } -var protoPact = [...]int{ - 192, -1000, 301, 301, 206, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, 173, 281, 2688, 1463, 2688, 2688, - 2381, 2688, 301, -1000, 316, -1000, 175, 316, 316, -1000, - 63, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, +var protoPact = [...]int{ + 142, -1000, 160, 160, 196, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, 173, 284, 2271, 1100, 2271, 2271, + 1858, 2271, 160, -1000, 322, -1000, 172, 322, 322, 322, + 171, 77, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, 195, 74, -1000, 1858, 117, + 116, 114, -1000, 2271, 113, 170, -1000, 169, 168, -1000, + -1000, 2271, 798, 1100, 21, 1699, 2225, 1752, -1000, 163, + -1000, -1000, -1000, -1000, 167, -1000, -1000, -1000, -1000, -1000, + 1039, -1000, 308, 286, -1000, -1000, -1000, 1645, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + 166, 2070, 2017, 1964, 2271, 317, 2271, 2271, 293, -1000, + -1000, 2271, 33, 73, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, 2176, -1000, -1000, -1000, + -1000, -1000, 165, 191, 81, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, 
-1000, -1000, -1000, -1000, 1591, -1000, -1000, + -1000, -1000, 162, 2070, 2017, 1964, 2271, -1000, 2271, 72, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, 108, -1000, -1000, 2381, 120, - 110, 85, 29, 2688, 84, 171, -1000, -1000, 130, 129, - -1000, 2688, 856, 1463, 9, 2116, 2642, 2222, 29, 194, - -1000, -1000, -1000, -1000, 170, -1000, -1000, 314, -1000, -1000, - 1402, -1000, 300, 268, -1000, -1000, -1000, 2062, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - 169, 2540, 2487, 2434, 2688, 2688, 2688, 2381, 295, 1463, - 2688, 2688, 260, -1000, 16, 2593, -1000, -1000, -1000, -1000, - -1000, 167, 205, 79, -1000, 2008, -1000, -1000, -1000, -1000, - 166, 159, -1000, -1000, -1000, -1000, 165, 2688, -1000, 1036, - -1000, 105, 102, 37, -1000, 2275, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, 2688, 2688, 2688, 2688, 2688, - 2688, 204, -8, -1000, 221, 73, 202, 100, 96, 314, - 1124, -1000, -1000, -1000, 80, 93, -1000, 220, -1000, 312, - -1000, -1000, -1000, -1000, -1000, -1000, 21, -1000, -1000, -1000, - -1000, -1000, -1000, 487, -1000, 1341, 1, 0, 197, 189, - 182, 181, 180, 179, 304, -1000, 164, 295, 1463, 238, - 2169, 291, -1000, -1000, 316, 17, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 15, 80, - -1000, 66, -1000, 2328, -1000, 422, -1000, 1280, 975, 71, - -1000, -1000, 288, 284, 282, 277, 275, 273, 14, -1000, - -1000, -23, -1000, 107, -1000, -1000, 1954, -1000, -1000, -1000, - -1000, -1000, 163, 2688, 2688, 13, 314, 2381, -1000, 160, - -1000, -1000, -1000, 8, 2381, -1000, -1000, -12, -27, -1000, - 1219, 914, 58, -1000, -1000, 12, 6, 5, -4, -6, - -5, -1000, 158, -1000, 1463, 856, -1000, -1000, -1000, 178, - 177, -1000, 151, 26, -1000, 210, -1000, -1000, 612, 551, - -1000, 792, 53, -1000, -1000, -1000, 147, 2116, 67, -1000, - 146, 2116, 62, -1000, 65, 2116, 52, -1000, -1000, -1000, - 267, 257, -1000, -1000, 11, -1000, 1158, -1000, 1097, -1000, - -1000, -1000, 1900, 2116, -1000, 1846, 2116, -1000, 1792, 2116, - -9, -20, 2328, 731, 48, 670, 43, -1000, 1738, -1000, - 1684, -1000, 1630, -1000, 60, 2116, 35, 4, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, 1576, 2116, 119, -1000, - 1522, -1000, 200, -1000, 195, -1000, -1000, -1000, 50, -1000, - -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + 144, -1000, -1000, -1000, -1000, 161, 2271, -1000, 84, 1039, + 60, 58, 36, -1000, 1805, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, 2271, 2271, 2271, 2271, 2271, 2271, + 190, 17, 66, 211, 106, 189, 156, 155, 65, -1000, + 238, 2271, -1000, -1000, -1000, 70, 149, 64, 210, -1000, + 320, -1000, -1000, -1000, 2271, 2271, 2271, 188, -1000, 2271, + -1000, -1000, -1000, 14, -1000, -1000, -1000, -1000, -1000, -1000, + 673, -1000, 978, 6, 3, 187, 185, 184, 183, 182, + 181, 318, -1000, 146, 1100, 317, 244, 2123, 316, -1000, + -1000, 322, 63, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, 15, -1000, 70, 76, + -1000, 180, 179, 178, 314, -1000, 199, 1911, -1000, 608, + -1000, 917, 48, 54, -1000, -1000, 313, 312, 311, 310, + 295, 294, 13, -1000, 2, 62, 177, -1000, -1000, -1000, + 432, -1000, -1000, -1000, -1000, -1000, 143, 2271, 2271, -1000, + 2271, 69, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + 
-1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + 10, -1000, 1858, -1000, 141, -1000, -1000, -1000, 281, 278, + 275, 9, 14, 1858, 12, -1000, -8, -29, 61, 856, + 43, 47, -1000, -1000, 1, 5, -1, -4, -3, -10, + -1000, 140, -1000, 1100, 798, -1000, -1000, -1000, 176, 175, + -1000, 2271, -1000, 138, 41, -1000, -9, -11, -12, -1000, + 137, 107, 7, -1000, -1000, -1000, 737, 35, 31, -1000, + -1000, -1000, 132, 1699, 99, -1000, 131, 1699, 98, -1000, + 130, 1699, 95, -1000, -1000, -1000, 253, 249, -1000, -1000, + -1000, -1000, 129, -1000, 127, -1000, 126, -1000, -1000, 164, + -1000, -1000, 56, 37, -1000, 1537, 1699, -1000, 1483, 1699, + -1000, 1429, 1699, -15, -19, -1000, -1000, -1000, 145, -1000, + -1000, -1000, 123, 737, 737, -1000, 1375, -1000, 1321, -1000, + 1267, -1000, 122, 1699, 94, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, 1213, 1699, -1000, 1159, -1000, } -var protoPgo = [...]int{ - 0, 407, 406, 263, 333, 405, 404, 1, 13, 403, - 3, 402, 12, 35, 11, 9, 25, 21, 401, 15, - 0, 398, 36, 17, 396, 389, 8, 39, 387, 28, - 380, 376, 27, 374, 373, 372, 360, 6, 14, 10, - 371, 368, 365, 357, 31, 16, 33, 23, 354, 352, - 355, 32, 350, 343, 298, 29, 341, 26, 334, 30, - 324, 323, 7, +var protoPgo = [...]int{ + 0, 382, 381, 289, 327, 380, 377, 3, 376, 11, + 14, 375, 374, 373, 36, 12, 8, 28, 27, 372, + 16, 0, 370, 369, 368, 367, 364, 21, 363, 362, + 360, 9, 359, 358, 357, 10, 356, 355, 13, 354, + 352, 350, 349, 29, 348, 347, 346, 278, 1, 2, + 15, 345, 24, 344, 341, 32, 340, 339, 25, 23, + 338, 273, 34, 334, 333, 252, 31, 332, 17, 331, + 30, 329, 328, 5, } -var protoR1 = [...]int{ +var protoR1 = [...]int{ 0, 1, 1, 1, 1, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 5, 5, - 5, 6, 22, 22, 7, 12, 12, 13, 13, 15, - 15, 16, 16, 16, 18, 18, 18, 18, 18, 18, - 18, 18, 57, 57, 17, 26, 26, 26, 27, 27, - 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, - 28, 28, 28, 28, 24, 24, 24, 25, 25, 25, - 25, 25, 25, 25, 25, 25, 23, 23, 29, 29, - 29, 29, 29, 29, 29, 29, 14, 9, 9, 8, - 32, 32, 32, 32, 32, 32, 31, 40, 40, 40, - 39, 39, 39, 39, 39, 39, 30, 30, 33, 33, - 34, 34, 35, 21, 21, 21, 21, 21, 21, 21, - 21, 21, 21, 21, 21, 49, 49, 46, 46, 45, - 45, 45, 48, 48, 47, 47, 47, 19, 19, 42, - 42, 43, 43, 44, 41, 41, 50, 52, 52, 52, - 51, 51, 51, 51, 51, 51, 53, 53, 36, 38, - 38, 38, 37, 37, 37, 37, 37, 37, 37, 37, - 37, 37, 37, 37, 37, 54, 56, 56, 56, 55, - 55, 55, 55, 55, 58, 60, 60, 60, 59, 59, - 59, 59, 59, 61, 61, 62, 62, 11, 11, 11, - 10, 10, 10, 10, 20, 20, 20, 20, 20, 20, - 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, - 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, - 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, - 20, 20, 20, 20, 20, 20, 20, + 5, 6, 27, 27, 28, 28, 29, 29, 30, 30, + 7, 14, 14, 12, 12, 16, 16, 17, 17, 17, + 19, 19, 19, 19, 19, 19, 19, 19, 68, 68, + 18, 38, 38, 38, 37, 37, 37, 37, 37, 37, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 13, 13, 13, 35, 35, 35, 35, 35, 35, 31, + 31, 32, 32, 33, 33, 34, 34, 40, 40, 40, + 40, 40, 40, 40, 40, 41, 41, 41, 41, 41, + 41, 41, 41, 15, 9, 9, 8, 43, 43, 43, + 43, 43, 43, 42, 51, 51, 51, 50, 50, 50, + 50, 50, 50, 39, 39, 44, 44, 45, 45, 46, + 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, + 22, 22, 60, 60, 58, 58, 56, 56, 56, 59, + 59, 57, 57, 57, 20, 20, 53, 53, 54, 54, + 55, 52, 52, 61, 63, 63, 63, 62, 62, 62, + 62, 62, 62, 64, 64, 47, 49, 49, 49, 48, + 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, + 48, 48, 65, 67, 67, 67, 66, 66, 66, 66, + 66, 69, 71, 71, 71, 70, 70, 70, 70, 70, + 72, 72, 73, 73, 11, 11, 
11, 10, 10, 10, + 10, 23, 23, 23, 23, 23, 23, 23, 23, 23, + 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, + 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, + 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, + 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, + 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, + 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, + 24, 24, 24, 24, 26, 26, 26, 26, 26, 26, + 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, + 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, + 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, + 26, 26, 26, 26, 26, 25, 25, 25, 25, 25, + 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, + 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, + 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, + 25, 25, 25, 25, 21, 21, 21, 21, 21, 21, + 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, + 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, + 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, + 21, 21, 21, 21, 21, 21, 21, } -var protoR2 = [...]int{ +var protoR2 = [...]int{ 0, 1, 1, 2, 0, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 4, 3, 4, - 4, 3, 1, 3, 5, 1, 3, 1, 3, 1, - 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, - 2, 2, 1, 2, 3, 1, 2, 0, 1, 2, - 2, 2, 2, 1, 3, 4, 5, 5, 3, 2, - 5, 4, 5, 4, 1, 3, 3, 1, 3, 3, - 3, 5, 5, 3, 5, 5, 1, 2, 6, 6, - 6, 5, 7, 7, 7, 6, 3, 3, 1, 3, - 8, 8, 8, 9, 9, 9, 5, 2, 1, 0, - 1, 1, 1, 1, 2, 1, 5, 6, 7, 8, - 5, 6, 6, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 3, 4, 3, 1, 1, - 3, 3, 3, 1, 1, 3, 3, 1, 2, 3, - 1, 3, 1, 3, 3, 1, 5, 2, 1, 0, - 1, 1, 1, 1, 2, 1, 4, 5, 5, 2, - 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 2, 1, 5, 2, 1, 0, 1, - 1, 1, 2, 1, 5, 2, 1, 0, 1, 1, - 1, 2, 1, 10, 12, 2, 1, 2, 1, 0, - 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, + 4, 3, 1, 3, 1, 3, 1, 3, 1, 3, + 5, 1, 3, 1, 3, 1, 1, 1, 1, 1, + 1, 2, 2, 2, 2, 1, 2, 2, 1, 2, + 3, 1, 2, 0, 1, 2, 2, 2, 2, 1, + 3, 4, 5, 5, 3, 2, 5, 4, 5, 4, + 1, 3, 3, 1, 3, 3, 5, 3, 5, 1, + 2, 1, 2, 1, 2, 1, 2, 6, 6, 6, + 7, 7, 7, 5, 6, 6, 6, 6, 7, 7, + 7, 5, 6, 3, 1, 3, 3, 8, 8, 8, + 9, 9, 9, 5, 2, 1, 0, 1, 1, 1, + 1, 2, 1, 5, 6, 7, 8, 5, 6, 6, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 3, 4, 1, 3, 1, 3, 3, 1, + 3, 1, 3, 3, 1, 2, 3, 1, 3, 1, + 3, 1, 3, 5, 2, 1, 0, 1, 1, 1, + 1, 2, 1, 4, 5, 5, 2, 1, 0, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 2, 1, 5, 2, 1, 0, 1, 1, 1, 2, + 1, 5, 2, 1, 0, 1, 1, 1, 2, 1, + 6, 8, 4, 3, 2, 1, 0, 1, 1, 2, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, } -var protoChk = [...]int{ - -1000, -1, -2, -4, 8, -3, -5, -6, -7, -36, - -50, -54, -58, 52, 2, 9, 12, 13, 44, 43, - 45, 46, -4, -3, 51, 52, -57, 10, 11, 4, - -22, -20, 7, 8, 9, 10, 11, 12, 13, 14, +var protoChk = [...]int{ + -1000, -1, -2, -4, 8, -3, -5, -6, -7, -47, + -61, -65, -69, 52, 2, 9, 12, 13, 44, 43, + 45, 46, -4, -3, 51, 52, -68, 10, 11, 4, + -27, -21, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, - 45, 46, 47, 48, 49, -12, -13, -20, 65, -20, - -20, -23, -22, 59, -20, -57, 52, 4, -57, -57, - 52, 59, 51, 59, -23, 54, 54, 54, -22, 54, - 52, 52, 52, -20, -15, -16, -17, -57, -18, -20, - 54, 6, 64, 63, 5, 
-13, 66, -38, -37, -29, - -50, -36, -54, -49, -32, -7, -31, -34, -42, 52, - 2, 20, 19, 18, -23, 43, 44, 45, 39, 13, - 37, -35, 42, -44, 38, -52, -51, -7, -53, -43, - 52, 2, -20, 42, -44, -56, -55, -29, -32, 52, - 2, -60, -59, -7, -61, 52, 2, 47, 52, -26, - -27, -28, 2, -24, -20, 67, 6, 16, 5, 6, - 16, 5, 55, -37, 52, -23, 36, -23, 36, -23, - 36, -20, -46, -45, 5, -20, -20, -46, -41, -57, - 62, 55, -51, 52, 51, -48, -47, -19, 5, 64, - 55, -55, 52, 55, -59, 52, -20, 55, -27, 60, - 52, 60, 52, 53, -17, 62, -23, 2, -20, -20, - -20, -20, -20, -20, 51, 52, -14, 60, 67, 40, - 54, 51, 52, 52, 60, -21, 23, 24, 25, 26, - 27, 28, 29, 30, 31, 32, 33, 34, -19, 60, - 52, 40, 5, 65, -16, 67, -17, 62, -26, 2, - 68, 68, 51, 51, 51, 51, 51, 51, 5, 52, - -45, -9, -8, -12, 5, 41, -40, -39, -7, -30, - -33, 52, 2, -23, 36, 5, -57, 60, 52, -14, - -47, -19, 41, -62, 48, -23, 68, -25, 2, -15, - 62, -26, 2, 61, 61, 5, 5, 5, 5, 5, - 5, 52, -14, 68, 60, 51, 55, -39, 52, -20, - -20, 52, -14, -23, 52, 66, -23, 68, 60, 52, - 68, -26, 2, 61, 61, 52, -14, 54, -14, 52, - -14, 54, -14, 52, -14, 54, -14, 52, -8, -15, - 51, 51, 52, 61, 49, -15, 62, -15, 62, 61, - 61, 52, -38, 54, 52, -38, 54, 52, -38, 54, - 5, 5, 65, -26, 2, -26, 2, 55, -38, 55, - -38, 55, -38, 52, -14, 54, -14, -62, 61, 61, - 61, 61, 55, 55, 55, 52, -38, 54, 66, 55, - -38, 52, 54, 55, -11, -10, -7, 52, 2, 55, - -10, 52, + 45, 46, 47, 48, 49, -14, -12, -21, 65, -21, + -21, -31, -27, 59, -21, -68, 52, -68, -68, -68, + 52, 59, 51, 59, -31, 54, 54, 54, -27, 54, + 52, 52, 52, -27, -16, -17, -18, -68, -19, -21, + 54, 6, 64, 63, 5, -14, 66, -49, -48, -40, + -61, -47, -65, -60, -43, -7, -42, -45, -53, 52, + 2, 20, 19, 18, -32, 39, 37, -46, 42, -55, + -28, 59, 38, -23, 7, 8, 9, 10, 11, 12, + 14, 15, 16, 17, 21, 22, 23, 24, 25, 26, + 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, + 40, 41, 46, 47, 48, 49, -63, -62, -7, -64, + -54, 52, 2, -26, 42, -55, 7, 8, 9, 10, + 11, 12, 14, 15, 16, 17, 18, 19, 20, 21, + 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, + 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, + 43, 44, 45, 46, 47, 48, 49, -67, -66, -41, + -43, 52, 2, 20, 19, 18, -33, -29, 59, -24, + 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 17, 21, 22, 23, 24, 25, 26, 27, 28, 29, + 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, + 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, + -71, -70, -7, -72, 52, 2, 47, 52, -38, -37, + -36, 2, -13, -21, 67, 6, 16, 5, 6, 16, + 5, 55, -48, 52, -31, 36, -31, 36, -31, 36, + -21, -58, -56, 5, -21, -21, -58, -52, -68, -27, + 62, 59, 55, -62, 52, 51, -59, -57, -20, 5, + 64, 55, -66, 52, -31, -31, -31, -21, -27, 59, + 55, -70, 52, -21, 55, -38, 60, 52, 60, 52, + 53, -18, 62, -31, 2, -21, -21, -21, -21, -21, + -21, 51, 52, -15, 67, 60, 40, 54, 51, 52, + 52, 60, -22, 23, 24, 25, 26, 27, 28, 29, + 30, 31, 32, 33, 34, -27, -20, 52, 60, 40, + 5, -21, -21, -21, 51, -27, -73, 65, -17, 67, + -18, 62, -38, 2, 68, 68, 51, 51, 51, 51, + 51, 51, 5, 52, -9, -8, -14, -58, 5, 41, + -51, -50, -7, -39, -44, 52, 2, -34, 36, -30, + 59, -25, 7, 8, 9, 10, 11, 12, 14, 15, + 16, 17, 21, 22, 23, 24, 25, 26, 27, 28, + 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, + 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, + 5, -52, 60, 52, -15, -59, -20, 41, 51, 51, + 51, 5, 49, 48, -31, 68, -35, 2, -16, 62, + -38, 2, 61, 61, 5, 5, 5, 5, 5, 5, + 52, -15, 68, 60, 51, 55, -50, 52, -21, -21, + -27, 59, 52, -15, -31, 52, 5, 5, 5, 52, + -15, -73, -31, 66, 68, 68, 60, -38, 2, 61, + 61, 52, -15, 54, -15, 52, -15, 54, -15, 52, + -15, 54, -15, 52, -9, -16, 51, 51, -27, 52, + 61, 52, -15, 52, -15, 52, -15, 52, 52, 54, + 66, -35, 61, 61, 52, 
-49, 54, 52, -49, 54, + 52, -49, 54, 5, 5, 52, 52, 52, -11, -10, + -7, 52, 2, 60, 60, 55, -49, 55, -49, 55, + -49, 52, -15, 54, -15, 55, -10, 52, -35, -35, + 55, 55, 55, 52, -49, 54, 55, -49, 55, } -var protoDef = [...]int{ +var protoDef = [...]int{ -2, -2, -2, -2, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16, 0, 0, 0, 0, 0, - 0, 0, -2, 5, 0, 15, 0, 0, 0, 42, - 0, 22, 204, 205, 206, 207, 208, 209, 210, 211, - 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, - 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, - 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, - 242, 243, 244, 245, 246, 0, 25, 27, 0, 0, - 0, 0, 76, 0, 0, 0, 18, 43, 0, 0, - 21, 0, 0, 0, 0, -2, -2, -2, 77, -2, - 17, 19, 20, 23, 0, 29, 30, 31, 32, 33, - -2, 34, 0, 0, 39, 26, 28, 0, 160, 162, - 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, - 174, 0, 0, 0, 0, 240, 241, 0, 236, 210, - 234, 0, 239, 140, 235, 0, 148, 150, 151, 152, - 153, 155, 0, 239, 142, 0, 177, 179, 180, 181, - 183, 0, 186, 188, 189, 190, 192, 0, 24, 0, - 45, 48, 53, 0, 64, 0, 35, 38, 41, 36, - 37, 40, 158, 159, 173, 0, 233, 0, 233, 0, - 233, 0, 0, 128, 129, 0, 0, 0, 0, 145, - 0, 146, 147, 154, 0, 0, 133, 134, 137, 0, - 175, 176, 182, 184, 185, 191, 0, 44, 46, 49, - 50, 51, 52, 0, 59, -2, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 125, 0, 0, 0, 0, - -2, 0, 139, 143, 0, 0, 113, 114, 115, 116, - 117, 118, 119, 120, 121, 122, 123, 124, 0, 0, - 141, 0, 138, 0, 54, 0, 58, -2, 0, 53, - 65, 66, 0, 0, 0, 0, 0, 0, 0, 126, - 127, 0, 88, 0, 130, 131, 0, 98, 100, 101, - 102, 103, 105, 0, 233, 0, 144, 0, 156, 0, - 132, 135, 136, 0, 245, 196, 55, 0, 0, 67, - -2, 0, 53, 61, 63, 0, 0, 0, 0, 0, - 0, 81, 0, 86, 0, 0, 96, 97, 104, 0, - 0, 110, 0, 0, 157, 0, 195, 56, 0, 0, - 57, 0, 53, 60, 62, 78, 0, -2, 0, 79, - 0, -2, 0, 80, 0, -2, 0, 85, 87, 89, - 0, 0, 111, 112, 0, 68, -2, 69, -2, 70, - 73, 82, 0, -2, 83, 0, -2, 84, 0, -2, - 0, 0, 0, 0, 53, 0, 53, 90, 0, 91, - 0, 92, 0, 106, 0, -2, 0, 0, 71, 74, - 72, 75, 93, 94, 95, 107, 0, -2, 0, 108, - 0, 193, -2, 109, 0, 198, 200, 201, 203, 194, - 197, 202, + 0, 0, -2, 5, 0, 15, 0, 0, 0, 48, + 0, 22, 374, 375, 376, 377, 378, 379, 380, 381, + 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, + 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, + 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, + 412, 413, 414, 415, 416, 0, 31, 33, 0, 0, + 0, 0, 79, 0, 0, 0, 18, 0, 0, 49, + 21, 0, 0, 0, 0, -2, -2, -2, 80, -2, + 17, 19, 20, 23, 0, 35, 36, 37, 38, 39, + -2, 40, 0, 0, 45, 32, 34, 0, 177, 179, + 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, + 191, 0, 0, 0, 0, 0, 0, 0, 0, 157, + 81, 0, 247, 24, 221, 222, 223, 224, 225, 226, + 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, + 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, + 248, 249, 250, 251, 252, 253, 0, 165, 167, 168, + 169, 170, 172, 0, 0, 159, 294, 295, 296, 297, + 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, + 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, + 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, + 328, 329, 330, 331, 332, 333, 334, 0, 194, 196, + 197, 198, 200, 0, 0, 0, 0, 83, 0, 26, + 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, + 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, + 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, + 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, + 0, 203, 205, 206, 207, 209, 0, 30, 0, -2, + 54, 59, 0, 70, 0, 41, 44, 47, 42, 43, + 46, 175, 176, 190, 0, 403, 0, 403, 0, 403, + 0, 0, 144, 146, 0, 0, 0, 0, 161, 82, + 0, 0, 163, 164, 171, 0, 0, 149, 151, 154, + 0, 192, 193, 199, 0, 0, 0, 0, 84, 0, + 201, 202, 208, 0, 50, 52, 55, 56, 57, 58, + 
0, 65, -2, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 142, 0, 0, 0, 0, -2, 0, 156, + 160, 0, 0, 130, 131, 132, 133, 134, 135, 136, + 137, 138, 139, 140, 141, 25, 0, 158, 0, 0, + 155, 0, 0, 0, 0, 27, 0, 0, 60, 0, + 64, -2, 0, 59, 71, 72, 0, 0, 0, 0, + 0, 0, 0, 143, 0, 104, 0, 145, 147, 148, + 0, 115, 117, 118, 119, 120, 122, 0, 360, 85, + 0, 28, 335, 336, 337, 338, 339, 340, 341, 342, + 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, + 353, 354, 355, 356, 357, 358, 359, 361, 362, 363, + 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, + 0, 162, 0, 173, 0, 150, 152, 153, 0, 0, + 0, 0, 0, 415, 0, 61, 0, 0, 73, -2, + 0, 59, 67, 69, 0, 0, 0, 0, 0, 0, + 93, 0, 103, 0, 0, 113, 114, 121, 0, 0, + 86, 0, 127, 0, 0, 174, 0, 0, 0, 101, + 0, 0, 0, 213, 62, 63, 0, 0, 59, 66, + 68, 87, 0, -2, 0, 88, 0, -2, 0, 89, + 0, -2, 0, 94, 105, 106, 0, 0, 29, 128, + 129, 95, 0, 96, 0, 97, 0, 102, 210, -2, + 212, 74, 75, 77, 90, 0, -2, 91, 0, -2, + 92, 0, -2, 0, 0, 98, 99, 100, 0, 215, + 217, 218, 220, 0, 0, 107, 0, 108, 0, 109, + 0, 123, 0, -2, 0, 211, 214, 219, 76, 78, + 110, 111, 112, 124, 0, -2, 125, 0, 126, } -var protoTok1 = [...]int{ +var protoTok1 = [...]int{ 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, @@ -777,14 +837,15 @@ var protoTok1 = [...]int{ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 54, 3, 55, 77, } -var protoTok2 = [...]int{ +var protoTok2 = [...]int{ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, } + var protoTok3 = [...]int{ 0, } @@ -1128,1182 +1189,1362 @@ protodefault: case 1: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:116 +//line proto.y:143 { - protoVAL.file = &fileNode{syntax: protoDollar[1].syn} - protoVAL.file.setRange(protoDollar[1].syn, protoDollar[1].syn) + protoVAL.file = ast.NewFileNode(protoDollar[1].syn, nil) protolex.(*protoLex).res = protoVAL.file } case 2: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:121 +//line proto.y:147 { - protoVAL.file = &fileNode{decls: protoDollar[1].fileDecls} - if len(protoDollar[1].fileDecls) > 0 { - protoVAL.file.setRange(protoDollar[1].fileDecls[0], protoDollar[1].fileDecls[len(protoDollar[1].fileDecls)-1]) - } + protoVAL.file = ast.NewFileNode(nil, protoDollar[1].fileDecls) protolex.(*protoLex).res = protoVAL.file } case 3: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:128 +//line proto.y:151 { - protoVAL.file = &fileNode{syntax: protoDollar[1].syn, decls: protoDollar[2].fileDecls} - var end node - if len(protoDollar[2].fileDecls) > 0 { - end = protoDollar[2].fileDecls[len(protoDollar[2].fileDecls)-1] - } else { - end = protoDollar[1].syn - } - protoVAL.file.setRange(protoDollar[1].syn, end) + protoVAL.file = ast.NewFileNode(protoDollar[1].syn, protoDollar[2].fileDecls) protolex.(*protoLex).res = protoVAL.file } case 4: protoDollar = protoS[protopt-0 : protopt+1] -//line proto.y:139 +//line proto.y:155 { } case 5: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:142 +//line proto.y:158 + { + if protoDollar[2].fileDecl != nil { + protoVAL.fileDecls = append(protoDollar[1].fileDecls, protoDollar[2].fileDecl) + } else { + protoVAL.fileDecls = protoDollar[1].fileDecls + } + } + case 6: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:165 { - protoVAL.fileDecls = append(protoDollar[1].fileDecls, protoDollar[2].fileDecls...) 
+ if protoDollar[1].fileDecl != nil { + protoVAL.fileDecls = []ast.FileElement{protoDollar[1].fileDecl} + } else { + protoVAL.fileDecls = nil + } } case 7: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:147 +//line proto.y:173 { - protoVAL.fileDecls = []*fileElement{{imp: protoDollar[1].imprt}} + protoVAL.fileDecl = protoDollar[1].imprt } case 8: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:150 +//line proto.y:176 { - protoVAL.fileDecls = []*fileElement{{pkg: protoDollar[1].pkg}} + protoVAL.fileDecl = protoDollar[1].pkg } case 9: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:153 +//line proto.y:179 { - protoVAL.fileDecls = []*fileElement{{option: protoDollar[1].opts[0]}} + protoVAL.fileDecl = protoDollar[1].opt } case 10: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:156 +//line proto.y:182 { - protoVAL.fileDecls = []*fileElement{{message: protoDollar[1].msg}} + protoVAL.fileDecl = protoDollar[1].msg } case 11: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:159 +//line proto.y:185 { - protoVAL.fileDecls = []*fileElement{{enum: protoDollar[1].en}} + protoVAL.fileDecl = protoDollar[1].en } case 12: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:162 +//line proto.y:188 { - protoVAL.fileDecls = []*fileElement{{extend: protoDollar[1].extend}} + protoVAL.fileDecl = protoDollar[1].extend } case 13: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:165 +//line proto.y:191 { - protoVAL.fileDecls = []*fileElement{{service: protoDollar[1].svc}} + protoVAL.fileDecl = protoDollar[1].svc } case 14: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:168 +//line proto.y:194 { - protoVAL.fileDecls = []*fileElement{{empty: protoDollar[1].b}} + protoVAL.fileDecl = ast.NewEmptyDeclNode(protoDollar[1].b) } case 15: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:171 +//line proto.y:197 { + protoVAL.fileDecl = nil } case 16: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:173 +//line proto.y:200 { + protoVAL.fileDecl = nil } case 17: protoDollar = protoS[protopt-4 : protopt+1] -//line proto.y:176 +//line proto.y:204 { - protoVAL.syn = &syntaxNode{syntax: protoDollar[3].str} - protoVAL.syn.setRange(protoDollar[1].id, protoDollar[4].b) + protoVAL.syn = ast.NewSyntaxNode(protoDollar[1].id.ToKeyword(), protoDollar[2].b, protoDollar[3].str.toStringValueNode(), protoDollar[4].b) } case 18: protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:181 +//line proto.y:208 { - protoVAL.imprt = &importNode{name: protoDollar[2].str} - protoVAL.imprt.setRange(protoDollar[1].id, protoDollar[3].b) + protoVAL.imprt = ast.NewImportNode(protoDollar[1].id.ToKeyword(), nil, nil, protoDollar[2].str.toStringValueNode(), protoDollar[3].b) } case 19: protoDollar = protoS[protopt-4 : protopt+1] -//line proto.y:185 +//line proto.y:211 { - protoVAL.imprt = &importNode{name: protoDollar[3].str, weak: true} - protoVAL.imprt.setRange(protoDollar[1].id, protoDollar[4].b) + protoVAL.imprt = ast.NewImportNode(protoDollar[1].id.ToKeyword(), nil, protoDollar[2].id.ToKeyword(), protoDollar[3].str.toStringValueNode(), protoDollar[4].b) } case 20: protoDollar = protoS[protopt-4 : protopt+1] -//line proto.y:189 +//line proto.y:214 { - protoVAL.imprt = &importNode{name: protoDollar[3].str, public: true} - protoVAL.imprt.setRange(protoDollar[1].id, protoDollar[4].b) + protoVAL.imprt = ast.NewImportNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), nil, protoDollar[3].str.toStringValueNode(), protoDollar[4].b) 
} case 21: protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:194 +//line proto.y:218 { - protoVAL.pkg = &packageNode{name: protoDollar[2].cid} - protoVAL.pkg.setRange(protoDollar[1].id, protoDollar[3].b) + protoVAL.pkg = ast.NewPackageNode(protoDollar[1].id.ToKeyword(), protoDollar[2].cid.toIdentValueNode(nil), protoDollar[3].b) } case 22: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:199 +//line proto.y:222 { - protoVAL.cid = &compoundIdentNode{val: protoDollar[1].id.val} - protoVAL.cid.setRange(protoDollar[1].id, protoDollar[1].id) + protoVAL.cid = &identList{protoDollar[1].id, nil, nil} } case 23: protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:203 +//line proto.y:225 { - protoVAL.cid = &compoundIdentNode{val: protoDollar[1].cid.val + "." + protoDollar[3].id.val} - protoVAL.cid.setRange(protoDollar[1].cid, protoDollar[3].id) + protoVAL.cid = &identList{protoDollar[1].id, protoDollar[2].b, protoDollar[3].cid} } case 24: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:208 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:233 { - n := &optionNameNode{parts: protoDollar[2].optNm} - n.setRange(protoDollar[2].optNm[0], protoDollar[2].optNm[len(protoDollar[2].optNm)-1]) - o := &optionNode{name: n, val: protoDollar[4].v} - o.setRange(protoDollar[1].id, protoDollar[5].b) - protoVAL.opts = []*optionNode{o} + protoVAL.cid = &identList{protoDollar[1].id, nil, nil} } - case 26: + case 25: protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:218 +//line proto.y:236 { - protoVAL.optNm = append(protoDollar[1].optNm, protoDollar[3].optNm...) + protoVAL.cid = &identList{protoDollar[1].id, protoDollar[2].b, protoDollar[3].cid} } - case 27: + case 26: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:223 +//line proto.y:240 { - nm := &compoundIdentNode{val: protoDollar[1].id.val} - nm.setRange(protoDollar[1].id, protoDollar[1].id) - protoVAL.optNm = toNameParts(nm) + protoVAL.cid = &identList{protoDollar[1].id, nil, nil} } - case 28: + case 27: protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:228 +//line proto.y:243 { - p := &optionNamePartNode{text: protoDollar[2].cid, isExtension: true} - p.setRange(protoDollar[1].b, protoDollar[3].b) - protoVAL.optNm = []*optionNamePartNode{p} + protoVAL.cid = &identList{protoDollar[1].id, protoDollar[2].b, protoDollar[3].cid} } - case 31: + case 28: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:237 +//line proto.y:247 { - protoVAL.v = protoDollar[1].str + protoVAL.cid = &identList{protoDollar[1].id, nil, nil} } - case 33: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:241 - { - if protoDollar[1].id.val == "true" { - protoVAL.v = &boolLiteralNode{identNode: protoDollar[1].id, val: true} - } else if protoDollar[1].id.val == "false" { - protoVAL.v = &boolLiteralNode{identNode: protoDollar[1].id, val: false} - } else if protoDollar[1].id.val == "inf" { - f := &compoundFloatNode{val: math.Inf(1)} - f.setRange(protoDollar[1].id, protoDollar[1].id) - protoVAL.v = f - } else if protoDollar[1].id.val == "nan" { - f := &compoundFloatNode{val: math.NaN()} - f.setRange(protoDollar[1].id, protoDollar[1].id) - protoVAL.v = f - } else { - protoVAL.v = protoDollar[1].id - } + case 29: + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:250 + { + protoVAL.cid = &identList{protoDollar[1].id, protoDollar[2].b, protoDollar[3].cid} } - case 34: + case 30: + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:254 + { + refs, dots := 
protoDollar[2].optNms.toNodes() + optName := ast.NewOptionNameNode(refs, dots) + protoVAL.opt = ast.NewOptionNode(protoDollar[1].id.ToKeyword(), optName, protoDollar[3].b, protoDollar[4].v, protoDollar[5].b) + } + case 31: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:259 +//line proto.y:260 { - protoVAL.v = protoDollar[1].f + protoVAL.optNms = &fieldRefList{protoDollar[1].ref, nil, nil} } - case 35: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:262 + case 32: + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:263 { - f := &compoundFloatNode{val: -protoDollar[2].f.val} - f.setRange(protoDollar[1].b, protoDollar[2].f) - protoVAL.v = f + protoVAL.optNms = &fieldRefList{protoDollar[1].ref, protoDollar[2].b, protoDollar[3].optNms} } - case 36: - protoDollar = protoS[protopt-2 : protopt+1] + case 33: + protoDollar = protoS[protopt-1 : protopt+1] //line proto.y:267 { - f := &compoundFloatNode{val: protoDollar[2].f.val} - f.setRange(protoDollar[1].b, protoDollar[2].f) - protoVAL.v = f + protoVAL.ref = ast.NewFieldReferenceNode(protoDollar[1].id) } - case 37: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:272 + case 34: + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:270 { - f := &compoundFloatNode{val: math.Inf(1)} - f.setRange(protoDollar[1].b, protoDollar[2].id) - protoVAL.v = f + protoVAL.ref = ast.NewExtensionFieldReferenceNode(protoDollar[1].b, protoDollar[2].tid, protoDollar[3].b) } - case 38: - protoDollar = protoS[protopt-2 : protopt+1] + case 37: + protoDollar = protoS[protopt-1 : protopt+1] //line proto.y:277 { - f := &compoundFloatNode{val: math.Inf(-1)} - f.setRange(protoDollar[1].b, protoDollar[2].id) - protoVAL.v = f + protoVAL.v = protoDollar[1].str.toStringValueNode() } case 39: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:282 +//line proto.y:281 { - protoVAL.v = protoDollar[1].i + if protoDollar[1].id.Val == "true" || protoDollar[1].id.Val == "false" { + protoVAL.v = ast.NewBoolLiteralNode(protoDollar[1].id.ToKeyword()) + } else if protoDollar[1].id.Val == "inf" || protoDollar[1].id.Val == "nan" { + protoVAL.v = ast.NewSpecialFloatLiteralNode(protoDollar[1].id.ToKeyword()) + } else { + protoVAL.v = protoDollar[1].id + } } case 40: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:285 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:291 { - i := &compoundUintNode{val: protoDollar[2].i.val} - i.setRange(protoDollar[1].b, protoDollar[2].i) - protoVAL.v = i + protoVAL.v = protoDollar[1].f } case 41: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:290 +//line proto.y:294 { - if protoDollar[2].i.val > math.MaxInt64+1 { - // can't represent as int so treat as float literal - f := &compoundFloatNode{val: -float64(protoDollar[2].i.val)} - f.setRange(protoDollar[1].b, protoDollar[2].i) - protoVAL.v = f - } else { - i := &compoundIntNode{val: -int64(protoDollar[2].i.val)} - i.setRange(protoDollar[1].b, protoDollar[2].i) - protoVAL.v = i - } + protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, protoDollar[2].f) } case 42: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:303 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:297 { - protoVAL.str = &compoundStringNode{val: protoDollar[1].s.val} - protoVAL.str.setRange(protoDollar[1].s, protoDollar[1].s) + protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, protoDollar[2].f) } case 43: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:307 +//line proto.y:300 { - 
protoVAL.str = &compoundStringNode{val: protoDollar[1].str.val + protoDollar[2].s.val} - protoVAL.str.setRange(protoDollar[1].str, protoDollar[2].s) + f := ast.NewSpecialFloatLiteralNode(protoDollar[2].id.ToKeyword()) + protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, f) } case 44: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:312 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:304 + { + f := ast.NewSpecialFloatLiteralNode(protoDollar[2].id.ToKeyword()) + protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, f) + } + case 45: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:308 { - a := &aggregateLiteralNode{elements: protoDollar[2].agg} - a.setRange(protoDollar[1].b, protoDollar[3].b) - protoVAL.v = a + protoVAL.v = protoDollar[1].i } case 46: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:319 +//line proto.y:311 { - protoVAL.agg = append(protoDollar[1].agg, protoDollar[2].agg...) + protoVAL.v = ast.NewPositiveUintLiteralNode(protoDollar[1].b, protoDollar[2].i) } case 47: - protoDollar = protoS[protopt-0 : protopt+1] -//line proto.y:322 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:314 + { + if protoDollar[2].i.Val > math.MaxInt64+1 { + // can't represent as int so treat as float literal + protoVAL.v = ast.NewSignedFloatLiteralNode(protoDollar[1].b, protoDollar[2].i) + } else { + protoVAL.v = ast.NewNegativeIntLiteralNode(protoDollar[1].b, protoDollar[2].i) + } + } + case 48: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:323 { - protoVAL.agg = nil + protoVAL.str = &stringList{protoDollar[1].s, nil} } case 49: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:327 +//line proto.y:326 { - protoVAL.agg = protoDollar[1].agg + protoVAL.str = &stringList{protoDollar[1].s, protoDollar[2].str} } case 50: - protoDollar = protoS[protopt-2 : protopt+1] + protoDollar = protoS[protopt-3 : protopt+1] //line proto.y:330 { - protoVAL.agg = protoDollar[1].agg + fields, delims := protoDollar[2].msgLit.toNodes() + protoVAL.v = ast.NewMessageLiteralNode(protoDollar[1].b, fields, delims, protoDollar[3].b) } case 51: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:333 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:335 { + if protoDollar[1].msgEntry != nil { + protoVAL.msgLit = &messageFieldList{protoDollar[1].msgEntry, nil} + } else { + protoVAL.msgLit = nil + } } case 52: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:335 +//line proto.y:342 { + if protoDollar[1].msgEntry != nil { + protoVAL.msgLit = &messageFieldList{protoDollar[1].msgEntry, protoDollar[2].msgLit} + } else { + protoVAL.msgLit = protoDollar[2].msgLit + } } case 53: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:337 + protoDollar = protoS[protopt-0 : protopt+1] +//line proto.y:349 { + protoVAL.msgLit = nil } case 54: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:340 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:353 { - a := &aggregateEntryNode{name: protoDollar[1].aggName, val: protoDollar[3].v} - a.setRange(protoDollar[1].aggName, protoDollar[3].v) - protoVAL.agg = []*aggregateEntryNode{a} + if protoDollar[1].msgField != nil { + protoVAL.msgEntry = &messageFieldEntry{protoDollar[1].msgField, nil} + } else { + protoVAL.msgEntry = nil + } } case 55: - protoDollar = protoS[protopt-4 : protopt+1] -//line proto.y:345 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:360 { - s := &sliceLiteralNode{} - s.setRange(protoDollar[3].b, 
protoDollar[4].b) - a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s} - a.setRange(protoDollar[1].aggName, protoDollar[4].b) - protoVAL.agg = []*aggregateEntryNode{a} + if protoDollar[1].msgField != nil { + protoVAL.msgEntry = &messageFieldEntry{protoDollar[1].msgField, protoDollar[2].b} + } else { + protoVAL.msgEntry = nil + } } case 56: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:352 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:367 { - s := &sliceLiteralNode{elements: protoDollar[4].sl} - s.setRange(protoDollar[3].b, protoDollar[5].b) - a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s} - a.setRange(protoDollar[1].aggName, protoDollar[5].b) - protoVAL.agg = []*aggregateEntryNode{a} + if protoDollar[1].msgField != nil { + protoVAL.msgEntry = &messageFieldEntry{protoDollar[1].msgField, protoDollar[2].b} + } else { + protoVAL.msgEntry = nil + } } case 57: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:359 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:374 { + protoVAL.msgEntry = nil } case 58: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:361 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:377 { - a := &aggregateEntryNode{name: protoDollar[1].aggName, val: protoDollar[3].v} - a.setRange(protoDollar[1].aggName, protoDollar[3].v) - protoVAL.agg = []*aggregateEntryNode{a} + protoVAL.msgEntry = nil } case 59: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:366 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:380 { - a := &aggregateEntryNode{name: protoDollar[1].aggName, val: protoDollar[2].v} - a.setRange(protoDollar[1].aggName, protoDollar[2].v) - protoVAL.agg = []*aggregateEntryNode{a} + protoVAL.msgEntry = nil } case 60: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:371 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:384 { - s := &aggregateLiteralNode{elements: protoDollar[4].agg} - s.setRange(protoDollar[3].b, protoDollar[5].b) - a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s} - a.setRange(protoDollar[1].aggName, protoDollar[5].b) - protoVAL.agg = []*aggregateEntryNode{a} - } + if protoDollar[1].ref != nil { + protoVAL.msgField = ast.NewMessageFieldNode(protoDollar[1].ref, protoDollar[2].b, protoDollar[3].v) + } else { + protoVAL.msgField = nil + } + } case 61: protoDollar = protoS[protopt-4 : protopt+1] -//line proto.y:378 +//line proto.y:391 { - s := &aggregateLiteralNode{elements: protoDollar[3].agg} - s.setRange(protoDollar[2].b, protoDollar[4].b) - a := &aggregateEntryNode{name: protoDollar[1].aggName, val: s} - a.setRange(protoDollar[1].aggName, protoDollar[4].b) - protoVAL.agg = []*aggregateEntryNode{a} + if protoDollar[1].ref != nil { + val := ast.NewArrayLiteralNode(protoDollar[3].b, nil, nil, protoDollar[4].b) + protoVAL.msgField = ast.NewMessageFieldNode(protoDollar[1].ref, protoDollar[2].b, val) + } else { + protoVAL.msgField = nil + } } case 62: protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:385 +//line proto.y:399 { + if protoDollar[1].ref != nil { + vals, commas := protoDollar[4].sl.toNodes() + val := ast.NewArrayLiteralNode(protoDollar[3].b, vals, commas, protoDollar[5].b) + protoVAL.msgField = ast.NewMessageFieldNode(protoDollar[1].ref, protoDollar[2].b, val) + } else { + protoVAL.msgField = nil + } } case 63: - protoDollar = protoS[protopt-4 : protopt+1] -//line proto.y:387 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:408 { + protoVAL.msgField = nil } 
case 64: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:390 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:411 { - n := &compoundIdentNode{val: protoDollar[1].id.val} - n.setRange(protoDollar[1].id, protoDollar[1].id) - protoVAL.aggName = &aggregateNameNode{name: n} - protoVAL.aggName.setRange(protoDollar[1].id, protoDollar[1].id) + if protoDollar[1].ref != nil { + protoVAL.msgField = ast.NewMessageFieldNode(protoDollar[1].ref, protoDollar[2].b, protoDollar[3].v) + } else { + protoVAL.msgField = nil + } } case 65: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:396 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:418 { - protoVAL.aggName = &aggregateNameNode{name: protoDollar[2].cid, isExtension: true} - protoVAL.aggName.setRange(protoDollar[1].b, protoDollar[3].b) + if protoDollar[1].ref != nil { + protoVAL.msgField = ast.NewMessageFieldNode(protoDollar[1].ref, nil, protoDollar[2].v) + } else { + protoVAL.msgField = nil + } } case 66: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:400 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:425 { + if protoDollar[1].ref != nil { + fields, delims := protoDollar[4].msgLit.toNodes() + msg := ast.NewMessageLiteralNode(protoDollar[3].b, fields, delims, protoDollar[5].b) + protoVAL.msgField = ast.NewMessageFieldNode(protoDollar[1].ref, protoDollar[2].b, msg) + } else { + protoVAL.msgField = nil + } } case 67: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:403 + protoDollar = protoS[protopt-4 : protopt+1] +//line proto.y:434 { - protoVAL.sl = []valueNode{protoDollar[1].v} + if protoDollar[1].ref != nil { + fields, delims := protoDollar[3].msgLit.toNodes() + msg := ast.NewMessageLiteralNode(protoDollar[2].b, fields, delims, protoDollar[4].b) + protoVAL.msgField = ast.NewMessageFieldNode(protoDollar[1].ref, nil, msg) + } else { + protoVAL.msgField = nil + } } case 68: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:406 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:443 { - protoVAL.sl = append(protoDollar[1].sl, protoDollar[3].v) + protoVAL.msgField = nil } case 69: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:409 + protoDollar = protoS[protopt-4 : protopt+1] +//line proto.y:446 { - protoVAL.sl = append(protoDollar[1].sl, protoDollar[3].v) + protoVAL.msgField = nil } case 70: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:412 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:450 { - s := &aggregateLiteralNode{elements: protoDollar[2].agg} - s.setRange(protoDollar[1].b, protoDollar[3].b) - protoVAL.sl = []valueNode{s} + protoVAL.ref = ast.NewFieldReferenceNode(protoDollar[1].id) } case 71: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:417 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:453 { - s := &aggregateLiteralNode{elements: protoDollar[4].agg} - s.setRange(protoDollar[3].b, protoDollar[5].b) - protoVAL.sl = append(protoDollar[1].sl, s) + protoVAL.ref = ast.NewExtensionFieldReferenceNode(protoDollar[1].b, protoDollar[2].tid, protoDollar[3].b) } case 72: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:422 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:456 { - s := &aggregateLiteralNode{elements: protoDollar[4].agg} - s.setRange(protoDollar[3].b, protoDollar[5].b) - protoVAL.sl = append(protoDollar[1].sl, s) + protoVAL.ref = nil } case 73: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:427 + protoDollar = 
protoS[protopt-1 : protopt+1] +//line proto.y:460 { + protoVAL.sl = &valueList{protoDollar[1].v, nil, nil} } case 74: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:429 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:463 { + protoVAL.sl = &valueList{protoDollar[1].v, protoDollar[2].b, protoDollar[3].sl} } case 75: + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:466 + { + fields, delims := protoDollar[2].msgLit.toNodes() + msg := ast.NewMessageLiteralNode(protoDollar[1].b, fields, delims, protoDollar[3].b) + protoVAL.sl = &valueList{msg, nil, nil} + } + case 76: protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:431 +//line proto.y:471 { + fields, delims := protoDollar[2].msgLit.toNodes() + msg := ast.NewMessageLiteralNode(protoDollar[1].b, fields, delims, protoDollar[3].b) + protoVAL.sl = &valueList{msg, protoDollar[4].b, protoDollar[5].sl} } case 77: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:435 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:476 { - protoVAL.cid = &compoundIdentNode{val: "." + protoDollar[2].cid.val} - protoVAL.cid.setRange(protoDollar[1].b, protoDollar[2].cid) + protoVAL.sl = nil } case 78: - protoDollar = protoS[protopt-6 : protopt+1] -//line proto.y:440 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:479 { - lbl := fieldLabel{identNode: protoDollar[1].id, required: true} - protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].cid, name: protoDollar[3].id, tag: protoDollar[5].i} - protoVAL.fld.setRange(protoDollar[1].id, protoDollar[6].b) + protoVAL.sl = protoDollar[5].sl } case 79: - protoDollar = protoS[protopt-6 : protopt+1] -//line proto.y:445 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:483 { - lbl := fieldLabel{identNode: protoDollar[1].id} - protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].cid, name: protoDollar[3].id, tag: protoDollar[5].i} - protoVAL.fld.setRange(protoDollar[1].id, protoDollar[6].b) + protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil) } case 80: - protoDollar = protoS[protopt-6 : protopt+1] -//line proto.y:450 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:486 { - lbl := fieldLabel{identNode: protoDollar[1].id, repeated: true} - protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].cid, name: protoDollar[3].id, tag: protoDollar[5].i} - protoVAL.fld.setRange(protoDollar[1].id, protoDollar[6].b) + protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b) } case 81: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:455 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:490 { - protoVAL.fld = &fieldNode{fldType: protoDollar[1].cid, name: protoDollar[2].id, tag: protoDollar[4].i} - protoVAL.fld.setRange(protoDollar[1].cid, protoDollar[5].b) + protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil) } case 82: - protoDollar = protoS[protopt-7 : protopt+1] -//line proto.y:459 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:493 { - lbl := fieldLabel{identNode: protoDollar[1].id, required: true} - protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].cid, name: protoDollar[3].id, tag: protoDollar[5].i, options: protoDollar[6].cmpctOpts} - protoVAL.fld.setRange(protoDollar[1].id, protoDollar[7].b) + protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b) } case 83: - protoDollar = protoS[protopt-7 : protopt+1] -//line proto.y:464 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:497 { - lbl := 
fieldLabel{identNode: protoDollar[1].id} - protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].cid, name: protoDollar[3].id, tag: protoDollar[5].i, options: protoDollar[6].cmpctOpts} - protoVAL.fld.setRange(protoDollar[1].id, protoDollar[7].b) + protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil) } case 84: - protoDollar = protoS[protopt-7 : protopt+1] -//line proto.y:469 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:500 { - lbl := fieldLabel{identNode: protoDollar[1].id, repeated: true} - protoVAL.fld = &fieldNode{label: lbl, fldType: protoDollar[2].cid, name: protoDollar[3].id, tag: protoDollar[5].i, options: protoDollar[6].cmpctOpts} - protoVAL.fld.setRange(protoDollar[1].id, protoDollar[7].b) + protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b) } case 85: - protoDollar = protoS[protopt-6 : protopt+1] -//line proto.y:474 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:504 { - protoVAL.fld = &fieldNode{fldType: protoDollar[1].cid, name: protoDollar[2].id, tag: protoDollar[4].i, options: protoDollar[5].cmpctOpts} - protoVAL.fld.setRange(protoDollar[1].cid, protoDollar[6].b) + protoVAL.tid = protoDollar[1].cid.toIdentValueNode(nil) } case 86: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:479 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:507 { - protoVAL.cmpctOpts = &compactOptionsNode{decls: protoDollar[2].opts} - protoVAL.cmpctOpts.setRange(protoDollar[1].b, protoDollar[3].b) + protoVAL.tid = protoDollar[2].cid.toIdentValueNode(protoDollar[1].b) } case 87: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:484 + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:511 + { + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b) + } + case 88: + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:514 { - protoVAL.opts = append(protoDollar[1].opts, protoDollar[3].opts...) 
+ protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b) } case 89: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:489 + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:517 { - n := &optionNameNode{parts: protoDollar[1].optNm} - n.setRange(protoDollar[1].optNm[0], protoDollar[1].optNm[len(protoDollar[1].optNm)-1]) - o := &optionNode{name: n, val: protoDollar[3].v} - o.setRange(protoDollar[1].optNm[0], protoDollar[3].v) - protoVAL.opts = []*optionNode{o} + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b) } case 90: - protoDollar = protoS[protopt-8 : protopt+1] -//line proto.y:497 + protoDollar = protoS[protopt-7 : protopt+1] +//line proto.y:520 { - lbl := fieldLabel{identNode: protoDollar[1].id, required: true} - protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].i, decls: protoDollar[7].msgDecls} - protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b) + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b) } case 91: - protoDollar = protoS[protopt-8 : protopt+1] -//line proto.y:502 + protoDollar = protoS[protopt-7 : protopt+1] +//line proto.y:523 { - lbl := fieldLabel{identNode: protoDollar[1].id} - protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].i, decls: protoDollar[7].msgDecls} - protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b) + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b) } case 92: - protoDollar = protoS[protopt-8 : protopt+1] -//line proto.y:507 + protoDollar = protoS[protopt-7 : protopt+1] +//line proto.y:526 { - lbl := fieldLabel{identNode: protoDollar[1].id, repeated: true} - protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].i, decls: protoDollar[7].msgDecls} - protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b) + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b) } case 93: - protoDollar = protoS[protopt-9 : protopt+1] -//line proto.y:512 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:529 { - lbl := fieldLabel{identNode: protoDollar[1].id, required: true} - protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].i, options: protoDollar[6].cmpctOpts, decls: protoDollar[8].msgDecls} - protoVAL.grp.setRange(protoDollar[1].id, protoDollar[9].b) + protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, protoDollar[5].b) } case 94: - protoDollar = protoS[protopt-9 : protopt+1] -//line proto.y:517 + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:532 { - lbl := fieldLabel{identNode: protoDollar[1].id} - protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].i, options: protoDollar[6].cmpctOpts, decls: protoDollar[8].msgDecls} - 
protoVAL.grp.setRange(protoDollar[1].id, protoDollar[9].b) + protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, protoDollar[6].b) } case 95: - protoDollar = protoS[protopt-9 : protopt+1] -//line proto.y:522 + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:536 { - lbl := fieldLabel{identNode: protoDollar[1].id, repeated: true} - protoVAL.grp = &groupNode{groupKeyword: protoDollar[2].id, label: lbl, name: protoDollar[3].id, tag: protoDollar[5].i, options: protoDollar[6].cmpctOpts, decls: protoDollar[8].msgDecls} - protoVAL.grp.setRange(protoDollar[1].id, protoDollar[9].b) + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b) } case 96: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:528 + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:539 { - protoVAL.oo = &oneOfNode{name: protoDollar[2].id, decls: protoDollar[4].ooDecls} - protoVAL.oo.setRange(protoDollar[1].id, protoDollar[5].b) + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b) } case 97: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:533 + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:542 + { + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b) + } + case 98: + protoDollar = protoS[protopt-7 : protopt+1] +//line proto.y:545 { - protoVAL.ooDecls = append(protoDollar[1].ooDecls, protoDollar[2].ooDecls...) + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b) } case 99: - protoDollar = protoS[protopt-0 : protopt+1] -//line proto.y:537 + protoDollar = protoS[protopt-7 : protopt+1] +//line proto.y:548 { - protoVAL.ooDecls = nil + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b) } case 100: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:541 + protoDollar = protoS[protopt-7 : protopt+1] +//line proto.y:551 { - protoVAL.ooDecls = []*oneOfElement{{option: protoDollar[1].opts[0]}} + protoVAL.fld = ast.NewFieldNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b) } case 101: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:544 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:554 { - protoVAL.ooDecls = []*oneOfElement{{field: protoDollar[1].fld}} + protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, protoDollar[5].b) } case 102: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:547 + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:557 { - protoVAL.ooDecls = []*oneOfElement{{group: protoDollar[1].grp}} + protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, protoDollar[6].b) } case 103: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:550 + protoDollar = protoS[protopt-3 : protopt+1] +//line 
proto.y:561 { - protoVAL.ooDecls = []*oneOfElement{{empty: protoDollar[1].b}} + opts, commas := protoDollar[2].opts.toNodes() + protoVAL.cmpctOpts = ast.NewCompactOptionsNode(protoDollar[1].b, opts, commas, protoDollar[3].b) } case 104: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:553 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:566 { + protoVAL.opts = &compactOptionList{protoDollar[1].opt, nil, nil} } case 105: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:555 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:569 { + protoVAL.opts = &compactOptionList{protoDollar[1].opt, protoDollar[2].b, protoDollar[3].opts} } case 106: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:558 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:573 { - protoVAL.fld = &fieldNode{fldType: protoDollar[1].cid, name: protoDollar[2].id, tag: protoDollar[4].i} - protoVAL.fld.setRange(protoDollar[1].cid, protoDollar[5].b) + refs, dots := protoDollar[1].optNms.toNodes() + optName := ast.NewOptionNameNode(refs, dots) + protoVAL.opt = ast.NewCompactOptionNode(optName, protoDollar[2].b, protoDollar[3].v) } case 107: - protoDollar = protoS[protopt-6 : protopt+1] -//line proto.y:562 + protoDollar = protoS[protopt-8 : protopt+1] +//line proto.y:579 { - protoVAL.fld = &fieldNode{fldType: protoDollar[1].cid, name: protoDollar[2].id, tag: protoDollar[4].i, options: protoDollar[5].cmpctOpts} - protoVAL.fld.setRange(protoDollar[1].cid, protoDollar[6].b) + protoVAL.grp = ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b, protoDollar[7].msgDecls, protoDollar[8].b) } case 108: - protoDollar = protoS[protopt-7 : protopt+1] -//line proto.y:567 + protoDollar = protoS[protopt-8 : protopt+1] +//line proto.y:582 { - protoVAL.grp = &groupNode{groupKeyword: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].i, decls: protoDollar[6].msgDecls} - protoVAL.grp.setRange(protoDollar[1].id, protoDollar[7].b) + protoVAL.grp = ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b, protoDollar[7].msgDecls, protoDollar[8].b) } case 109: protoDollar = protoS[protopt-8 : protopt+1] -//line proto.y:571 +//line proto.y:585 { - protoVAL.grp = &groupNode{groupKeyword: protoDollar[1].id, name: protoDollar[2].id, tag: protoDollar[4].i, options: protoDollar[5].cmpctOpts, decls: protoDollar[7].msgDecls} - protoVAL.grp.setRange(protoDollar[1].id, protoDollar[8].b) + protoVAL.grp = ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, nil, protoDollar[6].b, protoDollar[7].msgDecls, protoDollar[8].b) } case 110: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:576 + protoDollar = protoS[protopt-9 : protopt+1] +//line proto.y:588 { - protoVAL.mapFld = &mapFieldNode{mapType: protoDollar[1].mapType, name: protoDollar[2].id, tag: protoDollar[4].i} - protoVAL.mapFld.setRange(protoDollar[1].mapType, protoDollar[5].b) + protoVAL.grp = ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b, protoDollar[8].msgDecls, protoDollar[9].b) } case 111: - protoDollar = protoS[protopt-6 : protopt+1] -//line proto.y:580 + protoDollar = protoS[protopt-9 : protopt+1] 
+//line proto.y:591 { - protoVAL.mapFld = &mapFieldNode{mapType: protoDollar[1].mapType, name: protoDollar[2].id, tag: protoDollar[4].i, options: protoDollar[5].cmpctOpts} - protoVAL.mapFld.setRange(protoDollar[1].mapType, protoDollar[6].b) + protoVAL.grp = ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b, protoDollar[8].msgDecls, protoDollar[9].b) } case 112: - protoDollar = protoS[protopt-6 : protopt+1] -//line proto.y:585 + protoDollar = protoS[protopt-9 : protopt+1] +//line proto.y:594 { - protoVAL.mapType = &mapTypeNode{mapKeyword: protoDollar[1].id, keyType: protoDollar[3].id, valueType: protoDollar[5].cid} - protoVAL.mapType.setRange(protoDollar[1].id, protoDollar[6].b) + protoVAL.grp = ast.NewGroupNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id.ToKeyword(), protoDollar[3].id, protoDollar[4].b, protoDollar[5].i, protoDollar[6].cmpctOpts, protoDollar[7].b, protoDollar[8].msgDecls, protoDollar[9].b) } - case 125: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:603 + case 113: + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:598 { - protoVAL.ext = &extensionRangeNode{ranges: protoDollar[2].rngs} - protoVAL.ext.setRange(protoDollar[1].id, protoDollar[3].b) + protoVAL.oo = ast.NewOneOfNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].ooDecls, protoDollar[5].b) } - case 126: - protoDollar = protoS[protopt-4 : protopt+1] -//line proto.y:607 + case 114: + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:602 { - protoVAL.ext = &extensionRangeNode{ranges: protoDollar[2].rngs, options: protoDollar[3].cmpctOpts} - protoVAL.ext.setRange(protoDollar[1].id, protoDollar[4].b) + if protoDollar[2].ooDecl != nil { + protoVAL.ooDecls = append(protoDollar[1].ooDecls, protoDollar[2].ooDecl) + } else { + protoVAL.ooDecls = protoDollar[1].ooDecls + } } - case 127: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:612 + case 115: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:609 { - protoVAL.rngs = append(protoDollar[1].rngs, protoDollar[3].rngs...) 
+ if protoDollar[1].ooDecl != nil { + protoVAL.ooDecls = []ast.OneOfElement{protoDollar[1].ooDecl} + } else { + protoVAL.ooDecls = nil + } } - case 129: + case 116: + protoDollar = protoS[protopt-0 : protopt+1] +//line proto.y:616 + { + protoVAL.ooDecls = nil + } + case 117: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:617 +//line proto.y:620 { - r := &rangeNode{startNode: protoDollar[1].i} - r.setRange(protoDollar[1].i, protoDollar[1].i) - protoVAL.rngs = []*rangeNode{r} + protoVAL.ooDecl = protoDollar[1].opt } - case 130: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:622 + case 118: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:623 { - r := &rangeNode{startNode: protoDollar[1].i, endNode: protoDollar[3].i} - r.setRange(protoDollar[1].i, protoDollar[3].i) - protoVAL.rngs = []*rangeNode{r} + protoVAL.ooDecl = protoDollar[1].fld } - case 131: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:627 + case 119: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:626 { - r := &rangeNode{startNode: protoDollar[1].i, endNode: protoDollar[3].id, endMax: true} - r.setRange(protoDollar[1].i, protoDollar[3].id) - protoVAL.rngs = []*rangeNode{r} + protoVAL.ooDecl = protoDollar[1].grp } - case 132: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:633 + case 120: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:629 + { + protoVAL.ooDecl = ast.NewEmptyDeclNode(protoDollar[1].b) + } + case 121: + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:632 { - protoVAL.rngs = append(protoDollar[1].rngs, protoDollar[3].rngs...) + protoVAL.ooDecl = nil } - case 134: + case 122: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:638 +//line proto.y:635 { - r := &rangeNode{startNode: protoDollar[1].il} - r.setRange(protoDollar[1].il, protoDollar[1].il) - protoVAL.rngs = []*rangeNode{r} + protoVAL.ooDecl = nil } - case 135: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:643 + case 123: + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:639 { - r := &rangeNode{startNode: protoDollar[1].il, endNode: protoDollar[3].il} - r.setRange(protoDollar[1].il, protoDollar[3].il) - protoVAL.rngs = []*rangeNode{r} + protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, protoDollar[5].b) } - case 136: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:648 + case 124: + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:642 { - r := &rangeNode{startNode: protoDollar[1].il, endNode: protoDollar[3].id, endMax: true} - r.setRange(protoDollar[1].il, protoDollar[3].id) - protoVAL.rngs = []*rangeNode{r} + protoVAL.fld = ast.NewFieldNode(nil, protoDollar[1].tid, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, protoDollar[6].b) } - case 137: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:654 + case 125: + protoDollar = protoS[protopt-7 : protopt+1] +//line proto.y:646 { - i := &compoundIntNode{val: int64(protoDollar[1].i.val)} - i.setRange(protoDollar[1].i, protoDollar[1].i) - protoVAL.il = i + protoVAL.grp = ast.NewGroupNode(nil, protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, protoDollar[5].b, protoDollar[6].msgDecls, protoDollar[7].b) } - case 138: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:659 + case 126: + protoDollar = protoS[protopt-8 : protopt+1] +//line proto.y:649 { - if protoDollar[2].i.val > 
math.MaxInt64+1 { - lexError(protolex, protoDollar[2].i.start(), fmt.Sprintf("numeric constant %d would underflow 64-bit signed int (allowed range is %d to %d)", protoDollar[2].i.val, int64(math.MinInt64), int64(math.MaxInt64))) - } - i := &compoundIntNode{val: -int64(protoDollar[2].i.val)} - i.setRange(protoDollar[1].b, protoDollar[2].i) - protoVAL.il = i + protoVAL.grp = ast.NewGroupNode(nil, protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, protoDollar[6].b, protoDollar[7].msgDecls, protoDollar[8].b) } - case 139: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:668 + case 127: + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:653 + { + protoVAL.mapFld = ast.NewMapFieldNode(protoDollar[1].mapType, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, nil, protoDollar[5].b) + } + case 128: + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:656 + { + protoVAL.mapFld = ast.NewMapFieldNode(protoDollar[1].mapType, protoDollar[2].id, protoDollar[3].b, protoDollar[4].i, protoDollar[5].cmpctOpts, protoDollar[6].b) + } + case 129: + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:660 { - protoVAL.resvd = &reservedNode{ranges: protoDollar[2].rngs} - protoVAL.resvd.setRange(protoDollar[1].id, protoDollar[3].b) + protoVAL.mapType = ast.NewMapTypeNode(protoDollar[1].id.ToKeyword(), protoDollar[2].b, protoDollar[3].id, protoDollar[4].b, protoDollar[5].tid, protoDollar[6].b) } - case 141: + case 142: protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:674 +//line proto.y:677 { - protoVAL.resvd = &reservedNode{ranges: protoDollar[2].rngs} - protoVAL.resvd.setRange(protoDollar[1].id, protoDollar[3].b) + ranges, commas := protoDollar[2].rngs.toNodes() + protoVAL.ext = ast.NewExtensionRangeNode(protoDollar[1].id.ToKeyword(), ranges, commas, nil, protoDollar[3].b) } case 143: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:680 + protoDollar = protoS[protopt-4 : protopt+1] +//line proto.y:681 { - protoVAL.resvd = &reservedNode{names: protoDollar[2].names} - protoVAL.resvd.setRange(protoDollar[1].id, protoDollar[3].b) + ranges, commas := protoDollar[2].rngs.toNodes() + protoVAL.ext = ast.NewExtensionRangeNode(protoDollar[1].id.ToKeyword(), ranges, commas, protoDollar[3].cmpctOpts, protoDollar[4].b) } case 144: - protoDollar = protoS[protopt-3 : protopt+1] -//line proto.y:685 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:686 { - protoVAL.names = append(protoDollar[1].names, protoDollar[3].str) + protoVAL.rngs = &rangeList{protoDollar[1].rng, nil, nil} } case 145: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:688 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:689 { - protoVAL.names = []*compoundStringNode{protoDollar[1].str} + protoVAL.rngs = &rangeList{protoDollar[1].rng, protoDollar[2].b, protoDollar[3].rngs} } case 146: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:692 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:693 { - protoVAL.en = &enumNode{name: protoDollar[2].id, decls: protoDollar[4].enDecls} - protoVAL.en.setRange(protoDollar[1].id, protoDollar[5].b) + protoVAL.rng = ast.NewRangeNode(protoDollar[1].i, nil, nil, nil) } case 147: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:697 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:696 + { + protoVAL.rng = ast.NewRangeNode(protoDollar[1].i, protoDollar[2].id.ToKeyword(), protoDollar[3].i, nil) + } + case 148: + 
protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:699 { - protoVAL.enDecls = append(protoDollar[1].enDecls, protoDollar[2].enDecls...) + protoVAL.rng = ast.NewRangeNode(protoDollar[1].i, protoDollar[2].id.ToKeyword(), nil, protoDollar[3].id.ToKeyword()) } case 149: - protoDollar = protoS[protopt-0 : protopt+1] -//line proto.y:701 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:703 { - protoVAL.enDecls = nil + protoVAL.rngs = &rangeList{protoDollar[1].rng, nil, nil} } case 150: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:705 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:706 { - protoVAL.enDecls = []*enumElement{{option: protoDollar[1].opts[0]}} + protoVAL.rngs = &rangeList{protoDollar[1].rng, protoDollar[2].b, protoDollar[3].rngs} } case 151: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:708 +//line proto.y:710 { - protoVAL.enDecls = []*enumElement{{value: protoDollar[1].env}} + protoVAL.rng = ast.NewRangeNode(protoDollar[1].il, nil, nil, nil) } case 152: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:711 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:713 { - protoVAL.enDecls = []*enumElement{{reserved: protoDollar[1].resvd}} + protoVAL.rng = ast.NewRangeNode(protoDollar[1].il, protoDollar[2].id.ToKeyword(), protoDollar[3].il, nil) } case 153: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:714 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:716 { - protoVAL.enDecls = []*enumElement{{empty: protoDollar[1].b}} + protoVAL.rng = ast.NewRangeNode(protoDollar[1].il, protoDollar[2].id.ToKeyword(), nil, protoDollar[3].id.ToKeyword()) } case 154: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:717 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:720 { + protoVAL.il = protoDollar[1].i } case 155: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:719 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:723 { + protoVAL.il = ast.NewNegativeIntLiteralNode(protoDollar[1].b, protoDollar[2].i) } case 156: - protoDollar = protoS[protopt-4 : protopt+1] -//line proto.y:722 - { - protoVAL.env = &enumValueNode{name: protoDollar[1].id, number: protoDollar[3].il} - protoVAL.env.setRange(protoDollar[1].id, protoDollar[4].b) - } - case 157: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:726 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:727 { - protoVAL.env = &enumValueNode{name: protoDollar[1].id, number: protoDollar[3].il, options: protoDollar[4].cmpctOpts} - protoVAL.env.setRange(protoDollar[1].id, protoDollar[5].b) + ranges, commas := protoDollar[2].rngs.toNodes() + protoVAL.resvd = ast.NewReservedRangesNode(protoDollar[1].id.ToKeyword(), ranges, commas, protoDollar[3].b) } case 158: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:731 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:733 { - protoVAL.msg = &messageNode{name: protoDollar[2].id, decls: protoDollar[4].msgDecls} - protoVAL.msg.setRange(protoDollar[1].id, protoDollar[5].b) + ranges, commas := protoDollar[2].rngs.toNodes() + protoVAL.resvd = ast.NewReservedRangesNode(protoDollar[1].id.ToKeyword(), ranges, commas, protoDollar[3].b) } - case 159: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:736 + case 160: + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:739 { - protoVAL.msgDecls = append(protoDollar[1].msgDecls, protoDollar[2].msgDecls...) 
+ names, commas := protoDollar[2].names.toNodes() + protoVAL.resvd = ast.NewReservedNamesNode(protoDollar[1].id.ToKeyword(), names, commas, protoDollar[3].b) } case 161: - protoDollar = protoS[protopt-0 : protopt+1] -//line proto.y:740 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:744 { - protoVAL.msgDecls = nil + protoVAL.names = &nameList{protoDollar[1].str.toStringValueNode(), nil, nil} } case 162: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:744 + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:747 { - protoVAL.msgDecls = []*messageElement{{field: protoDollar[1].fld}} + protoVAL.names = &nameList{protoDollar[1].str.toStringValueNode(), protoDollar[2].b, protoDollar[3].names} } case 163: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:747 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:751 { - protoVAL.msgDecls = []*messageElement{{enum: protoDollar[1].en}} + protoVAL.en = ast.NewEnumNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].enDecls, protoDollar[5].b) } case 164: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:750 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:755 { - protoVAL.msgDecls = []*messageElement{{nested: protoDollar[1].msg}} + if protoDollar[2].enDecl != nil { + protoVAL.enDecls = append(protoDollar[1].enDecls, protoDollar[2].enDecl) + } else { + protoVAL.enDecls = protoDollar[1].enDecls + } } case 165: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:753 +//line proto.y:762 { - protoVAL.msgDecls = []*messageElement{{extend: protoDollar[1].extend}} + if protoDollar[1].enDecl != nil { + protoVAL.enDecls = []ast.EnumElement{protoDollar[1].enDecl} + } else { + protoVAL.enDecls = nil + } } case 166: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:756 + protoDollar = protoS[protopt-0 : protopt+1] +//line proto.y:769 { - protoVAL.msgDecls = []*messageElement{{extensionRange: protoDollar[1].ext}} + protoVAL.enDecls = nil } case 167: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:759 +//line proto.y:773 { - protoVAL.msgDecls = []*messageElement{{group: protoDollar[1].grp}} + protoVAL.enDecl = protoDollar[1].opt } case 168: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:762 +//line proto.y:776 { - protoVAL.msgDecls = []*messageElement{{option: protoDollar[1].opts[0]}} + protoVAL.enDecl = protoDollar[1].env } case 169: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:765 +//line proto.y:779 { - protoVAL.msgDecls = []*messageElement{{oneOf: protoDollar[1].oo}} + protoVAL.enDecl = protoDollar[1].resvd } case 170: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:768 +//line proto.y:782 { - protoVAL.msgDecls = []*messageElement{{mapField: protoDollar[1].mapFld}} + protoVAL.enDecl = ast.NewEmptyDeclNode(protoDollar[1].b) } case 171: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:771 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:785 { - protoVAL.msgDecls = []*messageElement{{reserved: protoDollar[1].resvd}} + protoVAL.enDecl = nil } case 172: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:774 +//line proto.y:788 { - protoVAL.msgDecls = []*messageElement{{empty: protoDollar[1].b}} + protoVAL.enDecl = nil } case 173: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:777 + protoDollar = protoS[protopt-4 : protopt+1] +//line proto.y:792 { + protoVAL.env = ast.NewEnumValueNode(protoDollar[1].id, protoDollar[2].b, 
protoDollar[3].il, nil, protoDollar[4].b) } case 174: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:779 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:795 { + protoVAL.env = ast.NewEnumValueNode(protoDollar[1].id, protoDollar[2].b, protoDollar[3].il, protoDollar[4].cmpctOpts, protoDollar[5].b) } case 175: protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:782 +//line proto.y:799 { - protoVAL.extend = &extendNode{extendee: protoDollar[2].cid, decls: protoDollar[4].extDecls} - protoVAL.extend.setRange(protoDollar[1].id, protoDollar[5].b) + protoVAL.msg = ast.NewMessageNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].msgDecls, protoDollar[5].b) } case 176: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:787 +//line proto.y:803 { - protoVAL.extDecls = append(protoDollar[1].extDecls, protoDollar[2].extDecls...) + if protoDollar[2].msgDecl != nil { + protoVAL.msgDecls = append(protoDollar[1].msgDecls, protoDollar[2].msgDecl) + } else { + protoVAL.msgDecls = protoDollar[1].msgDecls + } + } + case 177: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:810 + { + if protoDollar[1].msgDecl != nil { + protoVAL.msgDecls = []ast.MessageElement{protoDollar[1].msgDecl} + } else { + protoVAL.msgDecls = nil + } } case 178: protoDollar = protoS[protopt-0 : protopt+1] -//line proto.y:791 +//line proto.y:817 { - protoVAL.extDecls = nil + protoVAL.msgDecls = nil } case 179: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:795 +//line proto.y:821 { - protoVAL.extDecls = []*extendElement{{field: protoDollar[1].fld}} + protoVAL.msgDecl = protoDollar[1].fld } case 180: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:798 +//line proto.y:824 { - protoVAL.extDecls = []*extendElement{{group: protoDollar[1].grp}} + protoVAL.msgDecl = protoDollar[1].en } case 181: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:801 +//line proto.y:827 { - protoVAL.extDecls = []*extendElement{{empty: protoDollar[1].b}} + protoVAL.msgDecl = protoDollar[1].msg } case 182: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:804 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:830 { + protoVAL.msgDecl = protoDollar[1].extend } case 183: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:806 +//line proto.y:833 { + protoVAL.msgDecl = protoDollar[1].ext } case 184: - protoDollar = protoS[protopt-5 : protopt+1] -//line proto.y:809 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:836 { - protoVAL.svc = &serviceNode{name: protoDollar[2].id, decls: protoDollar[4].svcDecls} - protoVAL.svc.setRange(protoDollar[1].id, protoDollar[5].b) + protoVAL.msgDecl = protoDollar[1].grp } case 185: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:814 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:839 + { + protoVAL.msgDecl = protoDollar[1].opt + } + case 186: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:842 { - protoVAL.svcDecls = append(protoDollar[1].svcDecls, protoDollar[2].svcDecls...) 
+ protoVAL.msgDecl = protoDollar[1].oo } case 187: - protoDollar = protoS[protopt-0 : protopt+1] -//line proto.y:818 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:845 { - protoVAL.svcDecls = nil + protoVAL.msgDecl = protoDollar[1].mapFld } case 188: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:825 +//line proto.y:848 { - protoVAL.svcDecls = []*serviceElement{{option: protoDollar[1].opts[0]}} + protoVAL.msgDecl = protoDollar[1].resvd } case 189: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:828 +//line proto.y:851 { - protoVAL.svcDecls = []*serviceElement{{rpc: protoDollar[1].mtd}} + protoVAL.msgDecl = ast.NewEmptyDeclNode(protoDollar[1].b) } case 190: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:831 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:854 { - protoVAL.svcDecls = []*serviceElement{{empty: protoDollar[1].b}} + protoVAL.msgDecl = nil } case 191: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:834 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:857 { + protoVAL.msgDecl = nil } case 192: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:836 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:861 { + protoVAL.extend = ast.NewExtendNode(protoDollar[1].id.ToKeyword(), protoDollar[2].tid, protoDollar[3].b, protoDollar[4].extDecls, protoDollar[5].b) } case 193: - protoDollar = protoS[protopt-10 : protopt+1] -//line proto.y:839 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:865 { - protoVAL.mtd = &methodNode{name: protoDollar[2].id, input: protoDollar[4].rpcType, output: protoDollar[8].rpcType} - protoVAL.mtd.setRange(protoDollar[1].id, protoDollar[10].b) + if protoDollar[2].extDecl != nil { + protoVAL.extDecls = append(protoDollar[1].extDecls, protoDollar[2].extDecl) + } else { + protoVAL.extDecls = protoDollar[1].extDecls + } } case 194: - protoDollar = protoS[protopt-12 : protopt+1] -//line proto.y:843 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:872 { - protoVAL.mtd = &methodNode{name: protoDollar[2].id, input: protoDollar[4].rpcType, output: protoDollar[8].rpcType, options: protoDollar[11].opts} - protoVAL.mtd.setRange(protoDollar[1].id, protoDollar[12].b) + if protoDollar[1].extDecl != nil { + protoVAL.extDecls = []ast.ExtendElement{protoDollar[1].extDecl} + } else { + protoVAL.extDecls = nil + } } case 195: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:848 + protoDollar = protoS[protopt-0 : protopt+1] +//line proto.y:879 { - protoVAL.rpcType = &rpcTypeNode{msgType: protoDollar[2].cid, streamKeyword: protoDollar[1].id} - protoVAL.rpcType.setRange(protoDollar[1].id, protoDollar[2].cid) + protoVAL.extDecls = nil } case 196: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:852 +//line proto.y:883 { - protoVAL.rpcType = &rpcTypeNode{msgType: protoDollar[1].cid} - protoVAL.rpcType.setRange(protoDollar[1].cid, protoDollar[1].cid) + protoVAL.extDecl = protoDollar[1].fld } case 197: - protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:857 + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:886 { - protoVAL.opts = append(protoDollar[1].opts, protoDollar[2].opts...) 
+ protoVAL.extDecl = protoDollar[1].grp + } + case 198: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:889 + { + protoVAL.extDecl = ast.NewEmptyDeclNode(protoDollar[1].b) } case 199: - protoDollar = protoS[protopt-0 : protopt+1] -//line proto.y:861 + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:892 { - protoVAL.opts = []*optionNode{} + protoVAL.extDecl = nil } case 200: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:865 +//line proto.y:895 { - protoVAL.opts = protoDollar[1].opts + protoVAL.extDecl = nil } case 201: - protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:868 + protoDollar = protoS[protopt-5 : protopt+1] +//line proto.y:899 { - protoVAL.opts = []*optionNode{} + protoVAL.svc = ast.NewServiceNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].b, protoDollar[4].svcDecls, protoDollar[5].b) } case 202: protoDollar = protoS[protopt-2 : protopt+1] -//line proto.y:871 +//line proto.y:903 { + if protoDollar[2].svcDecl != nil { + protoVAL.svcDecls = append(protoDollar[1].svcDecls, protoDollar[2].svcDecl) + } else { + protoVAL.svcDecls = protoDollar[1].svcDecls + } } case 203: protoDollar = protoS[protopt-1 : protopt+1] -//line proto.y:873 +//line proto.y:910 + { + if protoDollar[1].svcDecl != nil { + protoVAL.svcDecls = []ast.ServiceElement{protoDollar[1].svcDecl} + } else { + protoVAL.svcDecls = nil + } + } + case 204: + protoDollar = protoS[protopt-0 : protopt+1] +//line proto.y:917 + { + protoVAL.svcDecls = nil + } + case 205: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:924 + { + protoVAL.svcDecl = protoDollar[1].opt + } + case 206: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:927 + { + protoVAL.svcDecl = protoDollar[1].mtd + } + case 207: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:930 + { + protoVAL.svcDecl = ast.NewEmptyDeclNode(protoDollar[1].b) + } + case 208: + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:933 + { + protoVAL.svcDecl = nil + } + case 209: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:936 + { + protoVAL.svcDecl = nil + } + case 210: + protoDollar = protoS[protopt-6 : protopt+1] +//line proto.y:940 + { + protoVAL.mtd = ast.NewRPCNode(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].rpcType, protoDollar[4].id.ToKeyword(), protoDollar[5].rpcType, protoDollar[6].b) + } + case 211: + protoDollar = protoS[protopt-8 : protopt+1] +//line proto.y:943 + { + protoVAL.mtd = ast.NewRPCNodeWithBody(protoDollar[1].id.ToKeyword(), protoDollar[2].id, protoDollar[3].rpcType, protoDollar[4].id.ToKeyword(), protoDollar[5].rpcType, protoDollar[6].b, protoDollar[7].rpcDecls, protoDollar[8].b) + } + case 212: + protoDollar = protoS[protopt-4 : protopt+1] +//line proto.y:947 + { + protoVAL.rpcType = ast.NewRPCTypeNode(protoDollar[1].b, protoDollar[2].id.ToKeyword(), protoDollar[3].tid, protoDollar[4].b) + } + case 213: + protoDollar = protoS[protopt-3 : protopt+1] +//line proto.y:950 + { + protoVAL.rpcType = ast.NewRPCTypeNode(protoDollar[1].b, nil, protoDollar[2].tid, protoDollar[3].b) + } + case 214: + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:954 + { + if protoDollar[2].rpcDecl != nil { + protoVAL.rpcDecls = append(protoDollar[1].rpcDecls, protoDollar[2].rpcDecl) + } else { + protoVAL.rpcDecls = protoDollar[1].rpcDecls + } + } + case 215: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:961 + { + if protoDollar[1].rpcDecl != nil { + protoVAL.rpcDecls = 
[]ast.RPCElement{protoDollar[1].rpcDecl} + } else { + protoVAL.rpcDecls = nil + } + } + case 216: + protoDollar = protoS[protopt-0 : protopt+1] +//line proto.y:968 + { + protoVAL.rpcDecls = nil + } + case 217: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:972 + { + protoVAL.rpcDecl = protoDollar[1].opt + } + case 218: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:975 + { + protoVAL.rpcDecl = ast.NewEmptyDeclNode(protoDollar[1].b) + } + case 219: + protoDollar = protoS[protopt-2 : protopt+1] +//line proto.y:978 + { + protoVAL.rpcDecl = nil + } + case 220: + protoDollar = protoS[protopt-1 : protopt+1] +//line proto.y:981 { + protoVAL.rpcDecl = nil } } goto protostack /* stack new state and value */ diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go index d5cc04710d1..2104c59a105 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/source_code_info.go @@ -8,6 +8,7 @@ import ( dpb "github.com/golang/protobuf/protoc-gen-go/descriptor" "github.com/jhump/protoreflect/desc/internal" + "github.com/jhump/protoreflect/desc/protoparse/ast" ) func (r *parseResult) generateSourceCodeInfo() *dpb.SourceCodeInfo { @@ -17,37 +18,37 @@ func (r *parseResult) generateSourceCodeInfo() *dpb.SourceCodeInfo { return nil } - sci := sourceCodeInfo{commentsUsed: map[*comment]struct{}{}} + sci := sourceCodeInfo{commentsUsed: map[*ast.Comment]struct{}{}} path := make([]int32, 0, 10) - fn := r.getFileNode(r.fd).(*fileNode) + fn := r.getFileNode(r.fd).(*ast.FileNode) sci.newLocWithoutComments(fn, nil) - if fn.syntax != nil { - sci.newLoc(fn.syntax, append(path, internal.File_syntaxTag)) + if fn.Syntax != nil { + sci.newLoc(fn.Syntax, append(path, internal.File_syntaxTag)) } var depIndex, optIndex, msgIndex, enumIndex, extendIndex, svcIndex int32 - for _, child := range fn.decls { - switch { - case child.imp != nil: - sci.newLoc(child.imp, append(path, internal.File_dependencyTag, int32(depIndex))) + for _, child := range fn.Decls { + switch child := child.(type) { + case *ast.ImportNode: + sci.newLoc(child, append(path, internal.File_dependencyTag, int32(depIndex))) depIndex++ - case child.pkg != nil: - sci.newLoc(child.pkg, append(path, internal.File_packageTag)) - case child.option != nil: - r.generateSourceCodeInfoForOption(&sci, child.option, false, &optIndex, append(path, internal.File_optionsTag)) - case child.message != nil: - r.generateSourceCodeInfoForMessage(&sci, child.message, nil, append(path, internal.File_messagesTag, msgIndex)) + case *ast.PackageNode: + sci.newLoc(child, append(path, internal.File_packageTag)) + case *ast.OptionNode: + r.generateSourceCodeInfoForOption(&sci, child, false, &optIndex, append(path, internal.File_optionsTag)) + case *ast.MessageNode: + r.generateSourceCodeInfoForMessage(&sci, child, nil, append(path, internal.File_messagesTag, msgIndex)) msgIndex++ - case child.enum != nil: - r.generateSourceCodeInfoForEnum(&sci, child.enum, append(path, internal.File_enumsTag, enumIndex)) + case *ast.EnumNode: + r.generateSourceCodeInfoForEnum(&sci, child, append(path, internal.File_enumsTag, enumIndex)) enumIndex++ - case child.extend != nil: - r.generateSourceCodeInfoForExtensions(&sci, child.extend, &extendIndex, &msgIndex, append(path, internal.File_extensionsTag), append(dup(path), internal.File_messagesTag)) - case child.service != nil: - 
r.generateSourceCodeInfoForService(&sci, child.service, append(path, internal.File_servicesTag, svcIndex)) + case *ast.ExtendNode: + r.generateSourceCodeInfoForExtensions(&sci, child, &extendIndex, &msgIndex, append(path, internal.File_extensionsTag), append(dup(path), internal.File_messagesTag)) + case *ast.ServiceNode: + r.generateSourceCodeInfoForService(&sci, child, append(path, internal.File_servicesTag, svcIndex)) svcIndex++ } } @@ -55,7 +56,7 @@ func (r *parseResult) generateSourceCodeInfo() *dpb.SourceCodeInfo { return &dpb.SourceCodeInfo{Location: sci.locs} } -func (r *parseResult) generateSourceCodeInfoForOption(sci *sourceCodeInfo, n *optionNode, compact bool, uninterpIndex *int32, path []int32) { +func (r *parseResult) generateSourceCodeInfoForOption(sci *sourceCodeInfo, n *ast.OptionNode, compact bool, uninterpIndex *int32, path []int32) { if !compact { sci.newLocWithoutComments(n, path) } @@ -79,95 +80,96 @@ func (r *parseResult) generateSourceCodeInfoForOption(sci *sourceCodeInfo, n *op *uninterpIndex++ sci.newLoc(n, optPath) var valTag int32 - switch n.val.(type) { - case *compoundIdentNode: + switch n.Val.(type) { + case ast.IdentValueNode: valTag = internal.Uninterpreted_identTag - case *intLiteralNode: - valTag = internal.Uninterpreted_posIntTag - case *compoundIntNode: + case *ast.NegativeIntLiteralNode: valTag = internal.Uninterpreted_negIntTag - case *compoundFloatNode: + case ast.IntValueNode: + valTag = internal.Uninterpreted_posIntTag + case ast.FloatValueNode: valTag = internal.Uninterpreted_doubleTag - case *compoundStringNode: + case ast.StringValueNode: valTag = internal.Uninterpreted_stringTag - case *aggregateLiteralNode: + case *ast.MessageLiteralNode: valTag = internal.Uninterpreted_aggregateTag } if valTag != 0 { - sci.newLoc(n.val, append(optPath, valTag)) + sci.newLoc(n.Val, append(optPath, valTag)) } - for j, nn := range n.name.parts { + for j, nn := range n.Name.Parts { optNmPath := append(optPath, internal.Uninterpreted_nameTag, int32(j)) sci.newLoc(nn, optNmPath) - sci.newLoc(nn.text, append(optNmPath, internal.UninterpretedName_nameTag)) + sci.newLoc(nn.Name, append(optNmPath, internal.UninterpretedName_nameTag)) } } -func (r *parseResult) generateSourceCodeInfoForMessage(sci *sourceCodeInfo, n msgDecl, fieldPath []int32, path []int32) { +func (r *parseResult) generateSourceCodeInfoForMessage(sci *sourceCodeInfo, n ast.MessageDeclNode, fieldPath []int32, path []int32) { sci.newLoc(n, path) - var decls []*messageElement + var decls []ast.MessageElement switch n := n.(type) { - case *messageNode: - decls = n.decls - case *groupNode: - decls = n.decls - case *mapFieldNode: + case *ast.MessageNode: + decls = n.Decls + case *ast.GroupNode: + decls = n.Decls + case *ast.MapFieldNode: // map entry so nothing else to do return } - sci.newLoc(n.messageName(), append(path, internal.Message_nameTag)) + sci.newLoc(n.MessageName(), append(path, internal.Message_nameTag)) // matching protoc, which emits the corresponding field type name (for group fields) // right after the source location for the group message name if fieldPath != nil { - sci.newLoc(n.messageName(), append(fieldPath, internal.Field_typeNameTag)) + sci.newLoc(n.MessageName(), append(fieldPath, internal.Field_typeNameTag)) } var optIndex, fieldIndex, oneOfIndex, extendIndex, nestedMsgIndex int32 var nestedEnumIndex, extRangeIndex, reservedRangeIndex, reservedNameIndex int32 for _, child := range decls { - switch { - case child.option != nil: - r.generateSourceCodeInfoForOption(sci, child.option, 
false, &optIndex, append(path, internal.Message_optionsTag)) - case child.field != nil: - r.generateSourceCodeInfoForField(sci, child.field, append(path, internal.Message_fieldsTag, fieldIndex)) + switch child := child.(type) { + case *ast.OptionNode: + r.generateSourceCodeInfoForOption(sci, child, false, &optIndex, append(path, internal.Message_optionsTag)) + case *ast.FieldNode: + r.generateSourceCodeInfoForField(sci, child, append(path, internal.Message_fieldsTag, fieldIndex)) fieldIndex++ - case child.group != nil: + case *ast.GroupNode: fldPath := append(path, internal.Message_fieldsTag, fieldIndex) - r.generateSourceCodeInfoForField(sci, child.group, fldPath) + r.generateSourceCodeInfoForField(sci, child, fldPath) fieldIndex++ - r.generateSourceCodeInfoForMessage(sci, child.group, fldPath, append(dup(path), internal.Message_nestedMessagesTag, nestedMsgIndex)) + r.generateSourceCodeInfoForMessage(sci, child, fldPath, append(dup(path), internal.Message_nestedMessagesTag, nestedMsgIndex)) nestedMsgIndex++ - case child.mapField != nil: - r.generateSourceCodeInfoForField(sci, child.mapField, append(path, internal.Message_fieldsTag, fieldIndex)) + case *ast.MapFieldNode: + r.generateSourceCodeInfoForField(sci, child, append(path, internal.Message_fieldsTag, fieldIndex)) fieldIndex++ - case child.oneOf != nil: - r.generateSourceCodeInfoForOneOf(sci, child.oneOf, &fieldIndex, &nestedMsgIndex, append(path, internal.Message_fieldsTag), append(dup(path), internal.Message_nestedMessagesTag), append(dup(path), internal.Message_oneOfsTag, oneOfIndex)) + nestedMsgIndex++ + case *ast.OneOfNode: + r.generateSourceCodeInfoForOneOf(sci, child, &fieldIndex, &nestedMsgIndex, append(path, internal.Message_fieldsTag), append(dup(path), internal.Message_nestedMessagesTag), append(dup(path), internal.Message_oneOfsTag, oneOfIndex)) oneOfIndex++ - case child.nested != nil: - r.generateSourceCodeInfoForMessage(sci, child.nested, nil, append(path, internal.Message_nestedMessagesTag, nestedMsgIndex)) + case *ast.MessageNode: + r.generateSourceCodeInfoForMessage(sci, child, nil, append(path, internal.Message_nestedMessagesTag, nestedMsgIndex)) nestedMsgIndex++ - case child.enum != nil: - r.generateSourceCodeInfoForEnum(sci, child.enum, append(path, internal.Message_enumsTag, nestedEnumIndex)) + case *ast.EnumNode: + r.generateSourceCodeInfoForEnum(sci, child, append(path, internal.Message_enumsTag, nestedEnumIndex)) nestedEnumIndex++ - case child.extend != nil: - r.generateSourceCodeInfoForExtensions(sci, child.extend, &extendIndex, &nestedMsgIndex, append(path, internal.Message_extensionsTag), append(dup(path), internal.Message_nestedMessagesTag)) - case child.extensionRange != nil: - r.generateSourceCodeInfoForExtensionRanges(sci, child.extensionRange, &extRangeIndex, append(path, internal.Message_extensionRangeTag)) - case child.reserved != nil: - if len(child.reserved.names) > 0 { + case *ast.ExtendNode: + r.generateSourceCodeInfoForExtensions(sci, child, &extendIndex, &nestedMsgIndex, append(path, internal.Message_extensionsTag), append(dup(path), internal.Message_nestedMessagesTag)) + case *ast.ExtensionRangeNode: + r.generateSourceCodeInfoForExtensionRanges(sci, child, &extRangeIndex, append(path, internal.Message_extensionRangeTag)) + case *ast.ReservedNode: + if len(child.Names) > 0 { resPath := append(path, internal.Message_reservedNameTag) - sci.newLoc(child.reserved, resPath) - for _, rn := range child.reserved.names { + sci.newLoc(child, resPath) + for _, rn := range child.Names { sci.newLoc(rn, 
append(resPath, reservedNameIndex)) reservedNameIndex++ } } - if len(child.reserved.ranges) > 0 { + if len(child.Ranges) > 0 { resPath := append(path, internal.Message_reservedRangeTag) - sci.newLoc(child.reserved, resPath) - for _, rr := range child.reserved.ranges { + sci.newLoc(child, resPath) + for _, rr := range child.Ranges { r.generateSourceCodeInfoForReservedRange(sci, rr, append(resPath, reservedRangeIndex)) reservedRangeIndex++ } @@ -176,31 +178,31 @@ func (r *parseResult) generateSourceCodeInfoForMessage(sci *sourceCodeInfo, n ms } } -func (r *parseResult) generateSourceCodeInfoForEnum(sci *sourceCodeInfo, n *enumNode, path []int32) { +func (r *parseResult) generateSourceCodeInfoForEnum(sci *sourceCodeInfo, n *ast.EnumNode, path []int32) { sci.newLoc(n, path) - sci.newLoc(n.name, append(path, internal.Enum_nameTag)) + sci.newLoc(n.Name, append(path, internal.Enum_nameTag)) var optIndex, valIndex, reservedNameIndex, reservedRangeIndex int32 - for _, child := range n.decls { - switch { - case child.option != nil: - r.generateSourceCodeInfoForOption(sci, child.option, false, &optIndex, append(path, internal.Enum_optionsTag)) - case child.value != nil: - r.generateSourceCodeInfoForEnumValue(sci, child.value, append(path, internal.Enum_valuesTag, valIndex)) + for _, child := range n.Decls { + switch child := child.(type) { + case *ast.OptionNode: + r.generateSourceCodeInfoForOption(sci, child, false, &optIndex, append(path, internal.Enum_optionsTag)) + case *ast.EnumValueNode: + r.generateSourceCodeInfoForEnumValue(sci, child, append(path, internal.Enum_valuesTag, valIndex)) valIndex++ - case child.reserved != nil: - if len(child.reserved.names) > 0 { + case *ast.ReservedNode: + if len(child.Names) > 0 { resPath := append(path, internal.Enum_reservedNameTag) - sci.newLoc(child.reserved, resPath) - for _, rn := range child.reserved.names { + sci.newLoc(child, resPath) + for _, rn := range child.Names { sci.newLoc(rn, append(resPath, reservedNameIndex)) reservedNameIndex++ } } - if len(child.reserved.ranges) > 0 { + if len(child.Ranges) > 0 { resPath := append(path, internal.Enum_reservedRangeTag) - sci.newLoc(child.reserved, resPath) - for _, rr := range child.reserved.ranges { + sci.newLoc(child, resPath) + for _, rr := range child.Ranges { r.generateSourceCodeInfoForReservedRange(sci, rr, append(resPath, reservedRangeIndex)) reservedRangeIndex++ } @@ -209,113 +211,99 @@ func (r *parseResult) generateSourceCodeInfoForEnum(sci *sourceCodeInfo, n *enum } } -func (r *parseResult) generateSourceCodeInfoForEnumValue(sci *sourceCodeInfo, n *enumValueNode, path []int32) { +func (r *parseResult) generateSourceCodeInfoForEnumValue(sci *sourceCodeInfo, n *ast.EnumValueNode, path []int32) { sci.newLoc(n, path) - sci.newLoc(n.name, append(path, internal.EnumVal_nameTag)) - sci.newLoc(n.getNumber(), append(path, internal.EnumVal_numberTag)) + sci.newLoc(n.Name, append(path, internal.EnumVal_nameTag)) + sci.newLoc(n.Number, append(path, internal.EnumVal_numberTag)) // enum value options - if n.options != nil { + if n.Options != nil { optsPath := append(path, internal.EnumVal_optionsTag) - sci.newLoc(n.options, optsPath) + sci.newLoc(n.Options, optsPath) var optIndex int32 - for _, opt := range n.options.decls { + for _, opt := range n.Options.GetElements() { r.generateSourceCodeInfoForOption(sci, opt, true, &optIndex, optsPath) } } } -func (r *parseResult) generateSourceCodeInfoForReservedRange(sci *sourceCodeInfo, n *rangeNode, path []int32) { +func (r *parseResult) 
generateSourceCodeInfoForReservedRange(sci *sourceCodeInfo, n *ast.RangeNode, path []int32) { sci.newLoc(n, path) - sci.newLoc(n.startNode, append(path, internal.ReservedRange_startTag)) - if n.endNode != nil { - sci.newLoc(n.endNode, append(path, internal.ReservedRange_endTag)) + sci.newLoc(n.StartVal, append(path, internal.ReservedRange_startTag)) + if n.EndVal != nil { + sci.newLoc(n.EndVal, append(path, internal.ReservedRange_endTag)) + } else if n.Max != nil { + sci.newLoc(n.Max, append(path, internal.ReservedRange_endTag)) } } -func (r *parseResult) generateSourceCodeInfoForExtensions(sci *sourceCodeInfo, n *extendNode, extendIndex, msgIndex *int32, extendPath, msgPath []int32) { +func (r *parseResult) generateSourceCodeInfoForExtensions(sci *sourceCodeInfo, n *ast.ExtendNode, extendIndex, msgIndex *int32, extendPath, msgPath []int32) { sci.newLoc(n, extendPath) - for _, decl := range n.decls { - switch { - case decl.field != nil: - r.generateSourceCodeInfoForField(sci, decl.field, append(extendPath, *extendIndex)) + for _, decl := range n.Decls { + switch decl := decl.(type) { + case *ast.FieldNode: + r.generateSourceCodeInfoForField(sci, decl, append(extendPath, *extendIndex)) *extendIndex++ - case decl.group != nil: + case *ast.GroupNode: fldPath := append(extendPath, *extendIndex) - r.generateSourceCodeInfoForField(sci, decl.group, fldPath) + r.generateSourceCodeInfoForField(sci, decl, fldPath) *extendIndex++ - r.generateSourceCodeInfoForMessage(sci, decl.group, fldPath, append(msgPath, *msgIndex)) + r.generateSourceCodeInfoForMessage(sci, decl, fldPath, append(msgPath, *msgIndex)) *msgIndex++ } } } -func (r *parseResult) generateSourceCodeInfoForOneOf(sci *sourceCodeInfo, n *oneOfNode, fieldIndex, nestedMsgIndex *int32, fieldPath, nestedMsgPath, oneOfPath []int32) { +func (r *parseResult) generateSourceCodeInfoForOneOf(sci *sourceCodeInfo, n *ast.OneOfNode, fieldIndex, nestedMsgIndex *int32, fieldPath, nestedMsgPath, oneOfPath []int32) { sci.newLoc(n, oneOfPath) - sci.newLoc(n.name, append(oneOfPath, internal.OneOf_nameTag)) + sci.newLoc(n.Name, append(oneOfPath, internal.OneOf_nameTag)) var optIndex int32 - for _, child := range n.decls { - switch { - case child.option != nil: - r.generateSourceCodeInfoForOption(sci, child.option, false, &optIndex, append(oneOfPath, internal.OneOf_optionsTag)) - case child.field != nil: - r.generateSourceCodeInfoForField(sci, child.field, append(fieldPath, *fieldIndex)) + for _, child := range n.Decls { + switch child := child.(type) { + case *ast.OptionNode: + r.generateSourceCodeInfoForOption(sci, child, false, &optIndex, append(oneOfPath, internal.OneOf_optionsTag)) + case *ast.FieldNode: + r.generateSourceCodeInfoForField(sci, child, append(fieldPath, *fieldIndex)) *fieldIndex++ - case child.group != nil: + case *ast.GroupNode: fldPath := append(fieldPath, *fieldIndex) - r.generateSourceCodeInfoForField(sci, child.group, fldPath) + r.generateSourceCodeInfoForField(sci, child, fldPath) *fieldIndex++ - r.generateSourceCodeInfoForMessage(sci, child.group, fldPath, append(nestedMsgPath, *nestedMsgIndex)) + r.generateSourceCodeInfoForMessage(sci, child, fldPath, append(nestedMsgPath, *nestedMsgIndex)) *nestedMsgIndex++ } } } -func (r *parseResult) generateSourceCodeInfoForField(sci *sourceCodeInfo, n fieldDecl, path []int32) { - isGroup := false - var opts *compactOptionsNode - var extendee *extendNode +func (r *parseResult) generateSourceCodeInfoForField(sci *sourceCodeInfo, n ast.FieldDeclNode, path []int32) { var fieldType string - switch n 
:= n.(type) { - case *fieldNode: - opts = n.options - extendee = n.extendee - fieldType = n.fldType.val - case *mapFieldNode: - opts = n.options - case *groupNode: - isGroup = true - extendee = n.extendee - case *syntheticMapField: - // shouldn't get here since we don't recurse into fields from a mapNode - // in generateSourceCodeInfoForMessage... but just in case - return + if f, ok := n.(*ast.FieldNode); ok { + fieldType = string(f.FldType.AsIdentifier()) } - if isGroup { + if n.GetGroupKeyword() != nil { // comments will appear on group message sci.newLocWithoutComments(n, path) - if extendee != nil { - sci.newLoc(extendee.extendee, append(path, internal.Field_extendeeTag)) + if n.FieldExtendee() != nil { + sci.newLoc(n.FieldExtendee(), append(path, internal.Field_extendeeTag)) } - if n.fieldLabel() != nil { + if n.FieldLabel() != nil { // no comments here either (label is first token for group, so we want // to leave the comments to be associated with the group message instead) - sci.newLocWithoutComments(n.fieldLabel(), append(path, internal.Field_labelTag)) + sci.newLocWithoutComments(n.FieldLabel(), append(path, internal.Field_labelTag)) } - sci.newLoc(n.fieldType(), append(path, internal.Field_typeTag)) + sci.newLoc(n.FieldType(), append(path, internal.Field_typeTag)) // let the name comments be attributed to the group name - sci.newLocWithoutComments(n.fieldName(), append(path, internal.Field_nameTag)) + sci.newLocWithoutComments(n.FieldName(), append(path, internal.Field_nameTag)) } else { sci.newLoc(n, path) - if extendee != nil { - sci.newLoc(extendee.extendee, append(path, internal.Field_extendeeTag)) + if n.FieldExtendee() != nil { + sci.newLoc(n.FieldExtendee(), append(path, internal.Field_extendeeTag)) } - if n.fieldLabel() != nil { - sci.newLoc(n.fieldLabel(), append(path, internal.Field_labelTag)) + if n.FieldLabel() != nil { + sci.newLoc(n.FieldLabel(), append(path, internal.Field_labelTag)) } - n.fieldType() var tag int32 if _, isScalar := fieldTypes[fieldType]; isScalar { tag = internal.Field_typeTag @@ -324,93 +312,97 @@ func (r *parseResult) generateSourceCodeInfoForField(sci *sourceCodeInfo, n fiel // to the type name field tag = internal.Field_typeNameTag } - sci.newLoc(n.fieldType(), append(path, tag)) - sci.newLoc(n.fieldName(), append(path, internal.Field_nameTag)) + sci.newLoc(n.FieldType(), append(path, tag)) + sci.newLoc(n.FieldName(), append(path, internal.Field_nameTag)) } - sci.newLoc(n.fieldTag(), append(path, internal.Field_numberTag)) + sci.newLoc(n.FieldTag(), append(path, internal.Field_numberTag)) - if opts != nil { + if n.GetOptions() != nil { optsPath := append(path, internal.Field_optionsTag) - sci.newLoc(opts, optsPath) + sci.newLoc(n.GetOptions(), optsPath) var optIndex int32 - for _, opt := range opts.decls { + for _, opt := range n.GetOptions().GetElements() { r.generateSourceCodeInfoForOption(sci, opt, true, &optIndex, optsPath) } } } -func (r *parseResult) generateSourceCodeInfoForExtensionRanges(sci *sourceCodeInfo, n *extensionRangeNode, extRangeIndex *int32, path []int32) { +func (r *parseResult) generateSourceCodeInfoForExtensionRanges(sci *sourceCodeInfo, n *ast.ExtensionRangeNode, extRangeIndex *int32, path []int32) { sci.newLoc(n, path) - for _, child := range n.ranges { + for _, child := range n.Ranges { path := append(path, *extRangeIndex) *extRangeIndex++ sci.newLoc(child, path) - sci.newLoc(child.startNode, append(path, internal.ExtensionRange_startTag)) - if child.endNode != nil { - sci.newLoc(child.endNode, append(path, 
internal.ExtensionRange_endTag)) + sci.newLoc(child.StartVal, append(path, internal.ExtensionRange_startTag)) + if child.EndVal != nil { + sci.newLoc(child.EndVal, append(path, internal.ExtensionRange_endTag)) + } else if child.Max != nil { + sci.newLoc(child.Max, append(path, internal.ExtensionRange_endTag)) } - if n.options != nil { + if n.Options != nil { optsPath := append(path, internal.ExtensionRange_optionsTag) - sci.newLoc(n.options, optsPath) + sci.newLoc(n.Options, optsPath) var optIndex int32 - for _, opt := range n.options.decls { + for _, opt := range n.Options.GetElements() { r.generateSourceCodeInfoForOption(sci, opt, true, &optIndex, optsPath) } } } } -func (r *parseResult) generateSourceCodeInfoForService(sci *sourceCodeInfo, n *serviceNode, path []int32) { +func (r *parseResult) generateSourceCodeInfoForService(sci *sourceCodeInfo, n *ast.ServiceNode, path []int32) { sci.newLoc(n, path) - sci.newLoc(n.name, append(path, internal.Service_nameTag)) + sci.newLoc(n.Name, append(path, internal.Service_nameTag)) var optIndex, rpcIndex int32 - for _, child := range n.decls { - switch { - case child.option != nil: - r.generateSourceCodeInfoForOption(sci, child.option, false, &optIndex, append(path, internal.Service_optionsTag)) - case child.rpc != nil: - r.generateSourceCodeInfoForMethod(sci, child.rpc, append(path, internal.Service_methodsTag, rpcIndex)) + for _, child := range n.Decls { + switch child := child.(type) { + case *ast.OptionNode: + r.generateSourceCodeInfoForOption(sci, child, false, &optIndex, append(path, internal.Service_optionsTag)) + case *ast.RPCNode: + r.generateSourceCodeInfoForMethod(sci, child, append(path, internal.Service_methodsTag, rpcIndex)) rpcIndex++ } } } -func (r *parseResult) generateSourceCodeInfoForMethod(sci *sourceCodeInfo, n *methodNode, path []int32) { +func (r *parseResult) generateSourceCodeInfoForMethod(sci *sourceCodeInfo, n *ast.RPCNode, path []int32) { sci.newLoc(n, path) - sci.newLoc(n.name, append(path, internal.Method_nameTag)) - if n.input.streamKeyword != nil { - sci.newLoc(n.input.streamKeyword, append(path, internal.Method_inputStreamTag)) + sci.newLoc(n.Name, append(path, internal.Method_nameTag)) + if n.Input.Stream != nil { + sci.newLoc(n.Input.Stream, append(path, internal.Method_inputStreamTag)) } - sci.newLoc(n.input.msgType, append(path, internal.Method_inputTag)) - if n.output.streamKeyword != nil { - sci.newLoc(n.output.streamKeyword, append(path, internal.Method_outputStreamTag)) + sci.newLoc(n.Input.MessageType, append(path, internal.Method_inputTag)) + if n.Output.Stream != nil { + sci.newLoc(n.Output.Stream, append(path, internal.Method_outputStreamTag)) } - sci.newLoc(n.output.msgType, append(path, internal.Method_outputTag)) + sci.newLoc(n.Output.MessageType, append(path, internal.Method_outputTag)) optsPath := append(path, internal.Method_optionsTag) var optIndex int32 - for _, opt := range n.options { - r.generateSourceCodeInfoForOption(sci, opt, false, &optIndex, optsPath) + for _, decl := range n.Decls { + if opt, ok := decl.(*ast.OptionNode); ok { + r.generateSourceCodeInfoForOption(sci, opt, false, &optIndex, optsPath) + } } } type sourceCodeInfo struct { locs []*dpb.SourceCodeInfo_Location - commentsUsed map[*comment]struct{} + commentsUsed map[*ast.Comment]struct{} } -func (sci *sourceCodeInfo) newLocWithoutComments(n node, path []int32) { +func (sci *sourceCodeInfo) newLocWithoutComments(n ast.Node, path []int32) { dup := make([]int32, len(path)) copy(dup, path) sci.locs = append(sci.locs, 
&dpb.SourceCodeInfo_Location{ Path: dup, - Span: makeSpan(n.start(), n.end()), + Span: makeSpan(n.Start(), n.End()), }) } -func (sci *sourceCodeInfo) newLoc(n node, path []int32) { - leadingComments := n.leadingComments() - trailingComments := n.trailingComments() +func (sci *sourceCodeInfo) newLoc(n ast.Node, path []int32) { + leadingComments := n.LeadingComments() + trailingComments := n.TrailingComments() if sci.commentUsed(leadingComments) { leadingComments = nil } @@ -423,7 +415,7 @@ func (sci *sourceCodeInfo) newLoc(n node, path []int32) { trail = proto.String(str) } var lead *string - if len(leadingComments) > 0 && leadingComments[len(leadingComments)-1].end.Line >= n.start().Line-1 { + if len(leadingComments) > 0 && leadingComments[len(leadingComments)-1].End.Line >= n.Start().Line-1 { lead = proto.String(detached[len(detached)-1]) detached = detached[:len(detached)-1] } @@ -434,7 +426,7 @@ func (sci *sourceCodeInfo) newLoc(n node, path []int32) { LeadingComments: lead, TrailingComments: trail, Path: dup, - Span: makeSpan(n.start(), n.end()), + Span: makeSpan(n.Start(), n.End()), }) } @@ -445,7 +437,7 @@ func makeSpan(start, end *SourcePos) []int32 { return []int32{int32(start.Line) - 1, int32(start.Col) - 1, int32(end.Line) - 1, int32(end.Col) - 1} } -func (sci *sourceCodeInfo) commentUsed(c []comment) bool { +func (sci *sourceCodeInfo) commentUsed(c []ast.Comment) bool { if len(c) == 0 { return false } @@ -457,27 +449,27 @@ func (sci *sourceCodeInfo) commentUsed(c []comment) bool { return false } -func groupComments(comments []comment) []string { +func groupComments(comments []ast.Comment) []string { if len(comments) == 0 { return nil } var groups []string - singleLineStyle := comments[0].text[:2] == "//" - line := comments[0].end.Line + singleLineStyle := comments[0].Text[:2] == "//" + line := comments[0].End.Line start := 0 for i := 1; i < len(comments); i++ { c := comments[i] prevSingleLine := singleLineStyle - singleLineStyle = strings.HasPrefix(comments[i].text, "//") - if !singleLineStyle || prevSingleLine != singleLineStyle || c.start.Line > line+1 { + singleLineStyle = strings.HasPrefix(comments[i].Text, "//") + if !singleLineStyle || prevSingleLine != singleLineStyle || c.Start.Line > line+1 { // new group! 
if str, ok := combineComments(comments[start:i]); ok { groups = append(groups, str) } start = i } - line = c.end.Line + line = c.End.Line } // don't forget last group if str, ok := combineComments(comments[start:]); ok { @@ -486,16 +478,16 @@ func groupComments(comments []comment) []string { return groups } -func combineComments(comments []comment) (string, bool) { +func combineComments(comments []ast.Comment) (string, bool) { if len(comments) == 0 { return "", false } var buf bytes.Buffer for _, c := range comments { - if c.text[:2] == "//" { - buf.WriteString(c.text[2:]) + if c.Text[:2] == "//" { + buf.WriteString(c.Text[2:]) } else { - lines := strings.Split(c.text[2:len(c.text)-2], "\n") + lines := strings.Split(c.Text[2:len(c.Text)-2], "\n") first := true for _, l := range lines { if first { diff --git a/vendor/github.com/jhump/protoreflect/desc/protoparse/validate.go b/vendor/github.com/jhump/protoreflect/desc/protoparse/validate.go index 70d9a51e55a..353af6b267c 100644 --- a/vendor/github.com/jhump/protoreflect/desc/protoparse/validate.go +++ b/vendor/github.com/jhump/protoreflect/desc/protoparse/validate.go @@ -5,8 +5,9 @@ import ( "sort" "github.com/golang/protobuf/proto" - dpb "github.com/golang/protobuf/protoc-gen-go/descriptor" + + "github.com/jhump/protoreflect/desc/protoparse/ast" ) func validateBasic(res *parseResult, containsErrors bool) { @@ -60,7 +61,7 @@ func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.Des if isProto3 && len(md.ExtensionRange) > 0 { n := res.getExtensionRangeNode(md.ExtensionRange[0]) - if err := res.errs.handleErrorWithPos(n.start(), "%s: extension ranges are not allowed in proto3", scope); err != nil { + if err := res.errs.handleErrorWithPos(n.Start(), "%s: extension ranges are not allowed in proto3", scope); err != nil { return err } } @@ -75,7 +76,7 @@ func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.Des if opt.IdentifierValue != nil { if opt.GetIdentifierValue() == "true" { valid = true - if err := res.errs.handleErrorWithPos(optn.getValue().start(), "%s: map_entry option should not be set explicitly; use map type instead", scope); err != nil { + if err := res.errs.handleErrorWithPos(optn.GetValue().Start(), "%s: map_entry option should not be set explicitly; use map type instead", scope); err != nil { return err } } else if opt.GetIdentifierValue() == "false" { @@ -84,7 +85,7 @@ func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.Des } } if !valid { - if err := res.errs.handleErrorWithPos(optn.getValue().start(), "%s: expecting bool value for map_entry option", scope); err != nil { + if err := res.errs.handleErrorWithPos(optn.GetValue().Start(), "%s: expecting bool value for map_entry option", scope); err != nil { return err } } @@ -100,7 +101,7 @@ func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.Des sort.Sort(rsvd) for i := 1; i < len(rsvd); i++ { if rsvd[i].start < rsvd[i-1].end { - if err := res.errs.handleErrorWithPos(rsvd[i].node.start(), "%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end-1, rsvd[i].start, rsvd[i].end-1); err != nil { + if err := res.errs.handleErrorWithPos(rsvd[i].node.Start(), "%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end-1, rsvd[i].start, rsvd[i].end-1); err != nil { return err } } @@ -115,7 +116,7 @@ func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.Des sort.Sort(exts) for i := 1; i < len(exts); i++ { if 
exts[i].start < exts[i-1].end { - if err := res.errs.handleErrorWithPos(exts[i].node.start(), "%s: extension ranges overlap: %d to %d and %d to %d", scope, exts[i-1].start, exts[i-1].end-1, exts[i].start, exts[i].end-1); err != nil { + if err := res.errs.handleErrorWithPos(exts[i].node.Start(), "%s: extension ranges overlap: %d to %d and %d to %d", scope, exts[i-1].start, exts[i-1].end-1, exts[i].start, exts[i].end-1); err != nil { return err } } @@ -129,9 +130,9 @@ func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.Des var pos *SourcePos if rsvd[i].start >= exts[j].start && rsvd[i].start < exts[j].end { - pos = rsvd[i].node.start() + pos = rsvd[i].node.Start() } else { - pos = exts[j].node.start() + pos = exts[j].node.Start() } // ranges overlap if err := res.errs.handleErrorWithPos(pos, "%s: extension range %d to %d overlaps reserved range %d to %d", scope, exts[j].start, exts[j].end-1, rsvd[i].start, rsvd[i].end-1); err != nil { @@ -155,12 +156,12 @@ func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.Des for _, fld := range md.Field { fn := res.getFieldNode(fld) if _, ok := rsvdNames[fld.GetName()]; ok { - if err := res.errs.handleErrorWithPos(fn.fieldName().start(), "%s: field %s is using a reserved name", scope, fld.GetName()); err != nil { + if err := res.errs.handleErrorWithPos(fn.FieldName().Start(), "%s: field %s is using a reserved name", scope, fld.GetName()); err != nil { return err } } if existing := fieldTags[fld.GetNumber()]; existing != "" { - if err := res.errs.handleErrorWithPos(fn.fieldTag().start(), "%s: fields %s and %s both have the same tag %d", scope, existing, fld.GetName(), fld.GetNumber()); err != nil { + if err := res.errs.handleErrorWithPos(fn.FieldTag().Start(), "%s: fields %s and %s both have the same tag %d", scope, existing, fld.GetName(), fld.GetNumber()); err != nil { return err } } @@ -168,14 +169,14 @@ func validateMessage(res *parseResult, isProto3 bool, prefix string, md *dpb.Des // check reserved ranges r := sort.Search(len(rsvd), func(index int) bool { return rsvd[index].end > fld.GetNumber() }) if r < len(rsvd) && rsvd[r].start <= fld.GetNumber() { - if err := res.errs.handleErrorWithPos(fn.fieldTag().start(), "%s: field %s is using tag %d which is in reserved range %d to %d", scope, fld.GetName(), fld.GetNumber(), rsvd[r].start, rsvd[r].end-1); err != nil { + if err := res.errs.handleErrorWithPos(fn.FieldTag().Start(), "%s: field %s is using tag %d which is in reserved range %d to %d", scope, fld.GetName(), fld.GetNumber(), rsvd[r].start, rsvd[r].end-1); err != nil { return err } } // and check extension ranges e := sort.Search(len(exts), func(index int) bool { return exts[index].end > fld.GetNumber() }) if e < len(exts) && exts[e].start <= fld.GetNumber() { - if err := res.errs.handleErrorWithPos(fn.fieldTag().start(), "%s: field %s is using tag %d which is in extension range %d to %d", scope, fld.GetName(), fld.GetNumber(), exts[e].start, exts[e].end-1); err != nil { + if err := res.errs.handleErrorWithPos(fn.FieldTag().Start(), "%s: field %s is using tag %d which is in extension range %d to %d", scope, fld.GetName(), fld.GetNumber(), exts[e].start, exts[e].end-1); err != nil { return err } } @@ -193,28 +194,29 @@ func validateEnum(res *parseResult, isProto3 bool, prefix string, ed *dpb.EnumDe // case the value would be absent from the descriptor. In such a case, this error // would be confusing and incorrect, so we just skip this check. 
enNode := res.getEnumNode(ed) - if err := res.errs.handleErrorWithPos(enNode.start(), "%s: enums must define at least one value", scope); err != nil { + if err := res.errs.handleErrorWithPos(enNode.Start(), "%s: enums must define at least one value", scope); err != nil { return err } } allowAlias := false + var allowAliasOpt *dpb.UninterpretedOption if index, err := findOption(res, scope, ed.Options.GetUninterpretedOption(), "allow_alias"); err != nil { return err } else if index >= 0 { - opt := ed.Options.UninterpretedOption[index] + allowAliasOpt = ed.Options.UninterpretedOption[index] valid := false - if opt.IdentifierValue != nil { - if opt.GetIdentifierValue() == "true" { + if allowAliasOpt.IdentifierValue != nil { + if allowAliasOpt.GetIdentifierValue() == "true" { allowAlias = true valid = true - } else if opt.GetIdentifierValue() == "false" { + } else if allowAliasOpt.GetIdentifierValue() == "false" { valid = true } } if !valid { - optNode := res.getOptionNode(opt) - if err := res.errs.handleErrorWithPos(optNode.getValue().start(), "%s: expecting bool value for allow_alias option", scope); err != nil { + optNode := res.getOptionNode(allowAliasOpt) + if err := res.errs.handleErrorWithPos(optNode.GetValue().Start(), "%s: expecting bool value for allow_alias option", scope); err != nil { return err } } @@ -222,22 +224,32 @@ func validateEnum(res *parseResult, isProto3 bool, prefix string, ed *dpb.EnumDe if isProto3 && len(ed.Value) > 0 && ed.Value[0].GetNumber() != 0 { evNode := res.getEnumValueNode(ed.Value[0]) - if err := res.errs.handleErrorWithPos(evNode.getNumber().start(), "%s: proto3 requires that first value in enum have numeric value of 0", scope); err != nil { + if err := res.errs.handleErrorWithPos(evNode.GetNumber().Start(), "%s: proto3 requires that first value in enum have numeric value of 0", scope); err != nil { return err } } - if !allowAlias { - // make sure all value numbers are distinct - vals := map[int32]string{} - for _, evd := range ed.Value { - if existing := vals[evd.GetNumber()]; existing != "" { + // check for aliases + vals := map[int32]string{} + hasAlias := false + for _, evd := range ed.Value { + existing := vals[evd.GetNumber()] + if existing != "" { + if allowAlias { + hasAlias = true + } else { evNode := res.getEnumValueNode(evd) - if err := res.errs.handleErrorWithPos(evNode.getNumber().start(), "%s: values %s and %s both have the same numeric value %d; use allow_alias option if intentional", scope, existing, evd.GetName(), evd.GetNumber()); err != nil { + if err := res.errs.handleErrorWithPos(evNode.GetNumber().Start(), "%s: values %s and %s both have the same numeric value %d; use allow_alias option if intentional", scope, existing, evd.GetName(), evd.GetNumber()); err != nil { return err } } - vals[evd.GetNumber()] = evd.GetName() + } + vals[evd.GetNumber()] = evd.GetName() + } + if allowAlias && !hasAlias { + optNode := res.getOptionNode(allowAliasOpt) + if err := res.errs.handleErrorWithPos(optNode.GetValue().Start(), "%s: allow_alias is true but no values are aliases", scope); err != nil { + return err } } @@ -250,7 +262,7 @@ func validateEnum(res *parseResult, isProto3 bool, prefix string, ed *dpb.EnumDe sort.Sort(rsvd) for i := 1; i < len(rsvd); i++ { if rsvd[i].start <= rsvd[i-1].end { - if err := res.errs.handleErrorWithPos(rsvd[i].node.start(), "%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end, rsvd[i].start, rsvd[i].end); err != nil { + if err := res.errs.handleErrorWithPos(rsvd[i].node.Start(), 
"%s: reserved ranges overlap: %d to %d and %d to %d", scope, rsvd[i-1].start, rsvd[i-1].end, rsvd[i].start, rsvd[i].end); err != nil { return err } } @@ -265,14 +277,14 @@ func validateEnum(res *parseResult, isProto3 bool, prefix string, ed *dpb.EnumDe for _, ev := range ed.Value { evn := res.getEnumValueNode(ev) if _, ok := rsvdNames[ev.GetName()]; ok { - if err := res.errs.handleErrorWithPos(evn.getName().start(), "%s: value %s is using a reserved name", scope, ev.GetName()); err != nil { + if err := res.errs.handleErrorWithPos(evn.GetName().Start(), "%s: value %s is using a reserved name", scope, ev.GetName()); err != nil { return err } } // check reserved ranges r := sort.Search(len(rsvd), func(index int) bool { return rsvd[index].end >= ev.GetNumber() }) if r < len(rsvd) && rsvd[r].start <= ev.GetNumber() { - if err := res.errs.handleErrorWithPos(evn.getNumber().start(), "%s: value %s is using number %d which is in reserved range %d to %d", scope, ev.GetName(), ev.GetNumber(), rsvd[r].start, rsvd[r].end); err != nil { + if err := res.errs.handleErrorWithPos(evn.GetNumber().Start(), "%s: value %s is using number %d which is in reserved range %d to %d", scope, ev.GetName(), ev.GetNumber(), rsvd[r].start, rsvd[r].end); err != nil { return err } } @@ -287,16 +299,11 @@ func validateField(res *parseResult, isProto3 bool, prefix string, fld *dpb.Fiel node := res.getFieldNode(fld) if isProto3 { if fld.GetType() == dpb.FieldDescriptorProto_TYPE_GROUP { - n := node.(*groupNode) - if err := res.errs.handleErrorWithPos(n.groupKeyword.start(), "%s: groups are not allowed in proto3", scope); err != nil { + if err := res.errs.handleErrorWithPos(node.GetGroupKeyword().Start(), "%s: groups are not allowed in proto3", scope); err != nil { return err } } else if fld.Label != nil && fld.GetLabel() == dpb.FieldDescriptorProto_LABEL_REQUIRED { - if err := res.errs.handleErrorWithPos(node.fieldLabel().start(), "%s: label 'required' is not allowed in proto3", scope); err != nil { - return err - } - } else if fld.Extendee != nil && fld.Label != nil && fld.GetLabel() == dpb.FieldDescriptorProto_LABEL_OPTIONAL { - if err := res.errs.handleErrorWithPos(node.fieldLabel().start(), "%s: label 'optional' is not allowed on extensions in proto3", scope); err != nil { + if err := res.errs.handleErrorWithPos(node.FieldLabel().Start(), "%s: label 'required' is not allowed in proto3", scope); err != nil { return err } } @@ -304,18 +311,18 @@ func validateField(res *parseResult, isProto3 bool, prefix string, fld *dpb.Fiel return err } else if index >= 0 { optNode := res.getOptionNode(fld.Options.GetUninterpretedOption()[index]) - if err := res.errs.handleErrorWithPos(optNode.getName().start(), "%s: default values are not allowed in proto3", scope); err != nil { + if err := res.errs.handleErrorWithPos(optNode.GetName().Start(), "%s: default values are not allowed in proto3", scope); err != nil { return err } } } else { if fld.Label == nil && fld.OneofIndex == nil { - if err := res.errs.handleErrorWithPos(node.fieldName().start(), "%s: field has no label; proto2 requires explicit 'optional' label", scope); err != nil { + if err := res.errs.handleErrorWithPos(node.FieldName().Start(), "%s: field has no label; proto2 requires explicit 'optional' label", scope); err != nil { return err } } if fld.GetExtendee() != "" && fld.Label != nil && fld.GetLabel() == dpb.FieldDescriptorProto_LABEL_REQUIRED { - if err := res.errs.handleErrorWithPos(node.fieldLabel().start(), "%s: extension fields cannot be 'required'", scope); err != nil 
{ + if err := res.errs.handleErrorWithPos(node.FieldLabel().Start(), "%s: extension fields cannot be 'required'", scope); err != nil { return err } } @@ -332,7 +339,7 @@ func validateField(res *parseResult, isProto3 bool, prefix string, fld *dpb.Fiel type tagRange struct { start int32 end int32 - node rangeDecl + node ast.RangeDeclNode } type tagRanges []tagRange diff --git a/vendor/github.com/jhump/protoreflect/dynamic/dynamic_message.go b/vendor/github.com/jhump/protoreflect/dynamic/dynamic_message.go index 513971e7ff7..de13b923cce 100644 --- a/vendor/github.com/jhump/protoreflect/dynamic/dynamic_message.go +++ b/vendor/github.com/jhump/protoreflect/dynamic/dynamic_message.go @@ -2529,17 +2529,14 @@ func (m *Message) mergeFrom(pm proto.Message) error { // extension fields rexts, _ := proto.ExtensionDescs(pm) - var unknownExtensions []byte for _, ed := range rexts { v, _ := proto.GetExtension(pm, ed) if v == nil { continue } if ed.ExtensionType == nil { - extBytes, _ := v.([]byte) - if len(extBytes) > 0 { - unknownExtensions = append(unknownExtensions, extBytes...) - } + // unrecognized extension: we'll handle that below when we + // handle other unrecognized fields continue } fd := m.er.FindExtension(m.md.GetFullyQualifiedName(), ed.Field) @@ -2569,13 +2566,6 @@ func (m *Message) mergeFrom(pm proto.Message) error { _ = m.UnmarshalMerge(data) } - // lastly, also extract any unknown extensions the message may have (unknown extensions - // are stored with other extensions, not in the XXX_unrecognized field, so we have to do - // more than just the step above...) - if len(unknownExtensions) > 0 { - // pulling in unknown fields is best-effort, so we just ignore errors - _ = m.UnmarshalMerge(unknownExtensions) - } return nil } @@ -2627,12 +2617,15 @@ func (m *Message) validateRecursive(prefix string) error { var dm *Message if d, ok := pm.(*Message); ok { dm = d - } else { + } else if pm != nil { dm = m.mf.NewDynamicMessage(md) if err := dm.ConvertFrom(pm); err != nil { return nil } } + if dm == nil { + return nil + } if err := dm.validateRecursive(chprefix); err != nil { return err } diff --git a/vendor/google.golang.org/protobuf/encoding/protojson/decode.go b/vendor/google.golang.org/protobuf/encoding/protojson/decode.go index 9bf4e8c1763..e6953ad6660 100644 --- a/vendor/google.golang.org/protobuf/encoding/protojson/decode.go +++ b/vendor/google.golang.org/protobuf/encoding/protojson/decode.go @@ -124,15 +124,6 @@ func (d decoder) unmarshalMessage(m pref.Message, skipTypeURL bool) error { return d.unexpectedTokenError(tok) } - if err := d.unmarshalFields(m, skipTypeURL); err != nil { - return err - } - - return nil -} - -// unmarshalFields unmarshals the fields into the given protoreflect.Message. -func (d decoder) unmarshalFields(m pref.Message, skipTypeURL bool) error { messageDesc := m.Descriptor() if !flags.ProtoLegacy && messageset.IsMessageSet(messageDesc) { return errors.New("no support for proto1 MessageSets") @@ -170,7 +161,7 @@ func (d decoder) unmarshalFields(m pref.Message, skipTypeURL bool) error { if strings.HasPrefix(name, "[") && strings.HasSuffix(name, "]") { // Only extension names are in [name] format. 
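// The validateEnum change above adds a stricter allow_alias check: setting the
// option without any aliased values is now reported as an error. A minimal
// sketch of how that surfaces through protoparse's public API; the file name
// "example.proto" and the enum are illustrative, and the error text comes from
// the new check.
package main

import (
	"fmt"

	"github.com/jhump/protoreflect/desc/protoparse"
)

func main() {
	src := `syntax = "proto3";
enum Status {
  option allow_alias = true; // set, but no two values share a number
  STATUS_UNKNOWN = 0;
  STATUS_OK = 1;
}`
	p := protoparse.Parser{
		Accessor: protoparse.FileContentsFromMap(map[string]string{"example.proto": src}),
	}
	if _, err := p.ParseFiles("example.proto"); err != nil {
		// Expected to mention: "allow_alias is true but no values are aliases".
		fmt.Println(err)
	}
}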
extName := pref.FullName(name[1 : len(name)-1]) - extType, err := d.findExtension(extName) + extType, err := d.opts.Resolver.FindExtensionByName(extName) if err != nil && err != protoregistry.NotFound { return d.newError(tok.Pos(), "unable to resolve %s: %v", tok.RawString(), err) } @@ -184,17 +175,7 @@ func (d decoder) unmarshalFields(m pref.Message, skipTypeURL bool) error { // The name can either be the JSON name or the proto field name. fd = fieldDescs.ByJSONName(name) if fd == nil { - fd = fieldDescs.ByName(pref.Name(name)) - if fd == nil { - // The proto name of a group field is in all lowercase, - // while the textual field name is the group message name. - gd := fieldDescs.ByName(pref.Name(strings.ToLower(name))) - if gd != nil && gd.Kind() == pref.GroupKind && gd.Message().Name() == pref.Name(name) { - fd = gd - } - } else if fd.Kind() == pref.GroupKind && fd.Message().Name() != pref.Name(name) { - fd = nil // reset since field name is actually the message name - } + fd = fieldDescs.ByTextName(name) } } if flags.ProtoLegacy { @@ -257,15 +238,6 @@ func (d decoder) unmarshalFields(m pref.Message, skipTypeURL bool) error { } } -// findExtension returns protoreflect.ExtensionType from the resolver if found. -func (d decoder) findExtension(xtName pref.FullName) (pref.ExtensionType, error) { - xt, err := d.opts.Resolver.FindExtensionByName(xtName) - if err == nil { - return xt, nil - } - return messageset.FindMessageSetExtension(d.opts.Resolver, xtName) -} - func isKnownValue(fd pref.FieldDescriptor) bool { md := fd.Message() return md != nil && md.FullName() == genid.Value_message_fullname diff --git a/vendor/google.golang.org/protobuf/encoding/protojson/encode.go b/vendor/google.golang.org/protobuf/encoding/protojson/encode.go index 7d619330081..ba971f07810 100644 --- a/vendor/google.golang.org/protobuf/encoding/protojson/encode.go +++ b/vendor/google.golang.org/protobuf/encoding/protojson/encode.go @@ -7,15 +7,17 @@ package protojson import ( "encoding/base64" "fmt" - "sort" "google.golang.org/protobuf/internal/encoding/json" "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/filedesc" "google.golang.org/protobuf/internal/flags" "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/internal/order" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" pref "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/reflect/protoregistry" ) @@ -131,7 +133,7 @@ func (o MarshalOptions) marshal(m proto.Message) ([]byte, error) { } enc := encoder{internalEnc, o} - if err := enc.marshalMessage(m.ProtoReflect()); err != nil { + if err := enc.marshalMessage(m.ProtoReflect(), ""); err != nil { return nil, err } if o.AllowPartial { @@ -145,76 +147,94 @@ type encoder struct { opts MarshalOptions } -// marshalMessage marshals the given protoreflect.Message. -func (e encoder) marshalMessage(m pref.Message) error { - if marshal := wellKnownTypeMarshaler(m.Descriptor().FullName()); marshal != nil { - return marshal(e, m) - } +// typeFieldDesc is a synthetic field descriptor used for the "@type" field. 
+var typeFieldDesc = func() protoreflect.FieldDescriptor { + var fd filedesc.Field + fd.L0.FullName = "@type" + fd.L0.Index = -1 + fd.L1.Cardinality = protoreflect.Optional + fd.L1.Kind = protoreflect.StringKind + return &fd +}() + +// typeURLFieldRanger wraps a protoreflect.Message and modifies its Range method +// to additionally iterate over a synthetic field for the type URL. +type typeURLFieldRanger struct { + order.FieldRanger + typeURL string +} - e.StartObject() - defer e.EndObject() - if err := e.marshalFields(m); err != nil { - return err +func (m typeURLFieldRanger) Range(f func(pref.FieldDescriptor, pref.Value) bool) { + if !f(typeFieldDesc, pref.ValueOfString(m.typeURL)) { + return } + m.FieldRanger.Range(f) +} - return nil +// unpopulatedFieldRanger wraps a protoreflect.Message and modifies its Range +// method to additionally iterate over unpopulated fields. +type unpopulatedFieldRanger struct{ pref.Message } + +func (m unpopulatedFieldRanger) Range(f func(pref.FieldDescriptor, pref.Value) bool) { + fds := m.Descriptor().Fields() + for i := 0; i < fds.Len(); i++ { + fd := fds.Get(i) + if m.Has(fd) || fd.ContainingOneof() != nil { + continue // ignore populated fields and fields within a oneofs + } + + v := m.Get(fd) + isProto2Scalar := fd.Syntax() == pref.Proto2 && fd.Default().IsValid() + isSingularMessage := fd.Cardinality() != pref.Repeated && fd.Message() != nil + if isProto2Scalar || isSingularMessage { + v = pref.Value{} // use invalid value to emit null + } + if !f(fd, v) { + return + } + } + m.Message.Range(f) } -// marshalFields marshals the fields in the given protoreflect.Message. -func (e encoder) marshalFields(m pref.Message) error { - messageDesc := m.Descriptor() - if !flags.ProtoLegacy && messageset.IsMessageSet(messageDesc) { +// marshalMessage marshals the fields in the given protoreflect.Message. +// If the typeURL is non-empty, then a synthetic "@type" field is injected +// containing the URL as the value. +func (e encoder) marshalMessage(m pref.Message, typeURL string) error { + if !flags.ProtoLegacy && messageset.IsMessageSet(m.Descriptor()) { return errors.New("no support for proto1 MessageSets") } - // Marshal out known fields. - fieldDescs := messageDesc.Fields() - for i := 0; i < fieldDescs.Len(); { - fd := fieldDescs.Get(i) - if od := fd.ContainingOneof(); od != nil { - fd = m.WhichOneof(od) - i += od.Fields().Len() - if fd == nil { - continue // unpopulated oneofs are not affected by EmitUnpopulated - } - } else { - i++ - } + if marshal := wellKnownTypeMarshaler(m.Descriptor().FullName()); marshal != nil { + return marshal(e, m) + } - val := m.Get(fd) - if !m.Has(fd) { - if !e.opts.EmitUnpopulated { - continue - } - isProto2Scalar := fd.Syntax() == pref.Proto2 && fd.Default().IsValid() - isSingularMessage := fd.Cardinality() != pref.Repeated && fd.Message() != nil - if isProto2Scalar || isSingularMessage { - // Use invalid value to emit null. - val = pref.Value{} - } - } + e.StartObject() + defer e.EndObject() + var fields order.FieldRanger = m + if e.opts.EmitUnpopulated { + fields = unpopulatedFieldRanger{m} + } + if typeURL != "" { + fields = typeURLFieldRanger{fields, typeURL} + } + + var err error + order.RangeFields(fields, order.IndexNameFieldOrder, func(fd pref.FieldDescriptor, v pref.Value) bool { name := fd.JSONName() if e.opts.UseProtoNames { - name = string(fd.Name()) - // Use type name for group field name. 
- if fd.Kind() == pref.GroupKind { - name = string(fd.Message().Name()) - } + name = fd.TextName() } - if err := e.WriteName(name); err != nil { - return err + + if err = e.WriteName(name); err != nil { + return false } - if err := e.marshalValue(val, fd); err != nil { - return err + if err = e.marshalValue(v, fd); err != nil { + return false } - } - - // Marshal out extensions. - if err := e.marshalExtensions(m); err != nil { - return err - } - return nil + return true + }) + return err } // marshalValue marshals the given protoreflect.Value. @@ -281,7 +301,7 @@ func (e encoder) marshalSingular(val pref.Value, fd pref.FieldDescriptor) error } case pref.MessageKind, pref.GroupKind: - if err := e.marshalMessage(val.Message()); err != nil { + if err := e.marshalMessage(val.Message(), ""); err != nil { return err } @@ -305,98 +325,20 @@ func (e encoder) marshalList(list pref.List, fd pref.FieldDescriptor) error { return nil } -type mapEntry struct { - key pref.MapKey - value pref.Value -} - // marshalMap marshals given protoreflect.Map. func (e encoder) marshalMap(mmap pref.Map, fd pref.FieldDescriptor) error { e.StartObject() defer e.EndObject() - // Get a sorted list based on keyType first. - entries := make([]mapEntry, 0, mmap.Len()) - mmap.Range(func(key pref.MapKey, val pref.Value) bool { - entries = append(entries, mapEntry{key: key, value: val}) - return true - }) - sortMap(fd.MapKey().Kind(), entries) - - // Write out sorted list. - for _, entry := range entries { - if err := e.WriteName(entry.key.String()); err != nil { - return err - } - if err := e.marshalSingular(entry.value, fd.MapValue()); err != nil { - return err - } - } - return nil -} - -// sortMap orders list based on value of key field for deterministic ordering. -func sortMap(keyKind pref.Kind, values []mapEntry) { - sort.Slice(values, func(i, j int) bool { - switch keyKind { - case pref.Int32Kind, pref.Sint32Kind, pref.Sfixed32Kind, - pref.Int64Kind, pref.Sint64Kind, pref.Sfixed64Kind: - return values[i].key.Int() < values[j].key.Int() - - case pref.Uint32Kind, pref.Fixed32Kind, - pref.Uint64Kind, pref.Fixed64Kind: - return values[i].key.Uint() < values[j].key.Uint() + var err error + order.RangeEntries(mmap, order.GenericKeyOrder, func(k pref.MapKey, v pref.Value) bool { + if err = e.WriteName(k.String()); err != nil { + return false } - return values[i].key.String() < values[j].key.String() - }) -} - -// marshalExtensions marshals extension fields. -func (e encoder) marshalExtensions(m pref.Message) error { - type entry struct { - key string - value pref.Value - desc pref.FieldDescriptor - } - - // Get a sorted list based on field key first. - var entries []entry - m.Range(func(fd pref.FieldDescriptor, v pref.Value) bool { - if !fd.IsExtension() { - return true + if err = e.marshalSingular(v, fd.MapValue()); err != nil { + return false } - - // For MessageSet extensions, the name used is the parent message. - name := fd.FullName() - if messageset.IsMessageSetExtension(fd) { - name = name.Parent() - } - - // Use [name] format for JSON field name. - entries = append(entries, entry{ - key: string(name), - value: v, - desc: fd, - }) return true }) - - // Sort extensions lexicographically. - sort.Slice(entries, func(i, j int) bool { - return entries[i].key < entries[j].key - }) - - // Write out sorted list. - for _, entry := range entries { - // JSON field name is the proto field name enclosed in [], similar to - // textproto. This is consistent with Go v1 lib. C++ lib v3.7.0 does not - // marshal out extension fields. 
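// The encoder rewrite above funnels field iteration through
// order.RangeFields(fields, order.IndexNameFieldOrder, ...), with
// unpopulatedFieldRanger and typeURLFieldRanger layered on top, so the options
// below keep their behavior while the output order stays deterministic. A
// minimal sketch of the user-facing options; descriptorpb is used only as a
// convenient proto2 message.
package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protojson"
	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	m := &descriptorpb.FieldDescriptorProto{
		Name:   proto.String("id"),
		Number: proto.Int32(1),
	}
	opts := protojson.MarshalOptions{
		EmitUnpopulated: true, // unset proto2 scalars are emitted as null via unpopulatedFieldRanger
		UseProtoNames:   true, // names now come from fd.TextName() rather than the JSON name
	}
	b, err := opts.Marshal(m)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}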
- if err := e.WriteName("[" + entry.key + "]"); err != nil { - return err - } - if err := e.marshalValue(entry.value, entry.desc); err != nil { - return err - } - } - return nil + return err } diff --git a/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go b/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go index def7377c78b..73b038eba78 100644 --- a/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go +++ b/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go @@ -106,13 +106,11 @@ func (e encoder) marshalAny(m pref.Message) error { fdType := fds.ByNumber(genid.Any_TypeUrl_field_number) fdValue := fds.ByNumber(genid.Any_Value_field_number) - // Start writing the JSON object. - e.StartObject() - defer e.EndObject() - if !m.Has(fdType) { if !m.Has(fdValue) { // If message is empty, marshal out empty JSON object. + e.StartObject() + e.EndObject() return nil } else { // Return error if type_url field is not set, but value is set. @@ -123,14 +121,8 @@ func (e encoder) marshalAny(m pref.Message) error { typeVal := m.Get(fdType) valueVal := m.Get(fdValue) - // Marshal out @type field. - typeURL := typeVal.String() - e.WriteName("@type") - if err := e.WriteString(typeURL); err != nil { - return err - } - // Resolve the type in order to unmarshal value field. + typeURL := typeVal.String() emt, err := e.opts.Resolver.FindMessageByURL(typeURL) if err != nil { return errors.New("%s: unable to resolve %q: %v", genid.Any_message_fullname, typeURL, err) @@ -149,12 +141,21 @@ func (e encoder) marshalAny(m pref.Message) error { // with corresponding custom JSON encoding of the embedded message as a // field. if marshal := wellKnownTypeMarshaler(emt.Descriptor().FullName()); marshal != nil { + e.StartObject() + defer e.EndObject() + + // Marshal out @type field. + e.WriteName("@type") + if err := e.WriteString(typeURL); err != nil { + return err + } + e.WriteName("value") return marshal(e, em) } // Else, marshal out the embedded message's fields in this Any object. - if err := e.marshalFields(em); err != nil { + if err := e.marshalMessage(em, typeURL); err != nil { return err } diff --git a/vendor/google.golang.org/protobuf/encoding/prototext/decode.go b/vendor/google.golang.org/protobuf/encoding/prototext/decode.go index cab95a42735..e2bbf7c6031 100644 --- a/vendor/google.golang.org/protobuf/encoding/prototext/decode.go +++ b/vendor/google.golang.org/protobuf/encoding/prototext/decode.go @@ -6,7 +6,6 @@ package prototext import ( "fmt" - "strings" "unicode/utf8" "google.golang.org/protobuf/internal/encoding/messageset" @@ -158,21 +157,11 @@ func (d decoder) unmarshalMessage(m pref.Message, checkDelims bool) error { switch tok.NameKind() { case text.IdentName: name = pref.Name(tok.IdentName()) - fd = fieldDescs.ByName(name) - if fd == nil { - // The proto name of a group field is in all lowercase, - // while the textproto field name is the group message name. - gd := fieldDescs.ByName(pref.Name(strings.ToLower(string(name)))) - if gd != nil && gd.Kind() == pref.GroupKind && gd.Message().Name() == name { - fd = gd - } - } else if fd.Kind() == pref.GroupKind && fd.Message().Name() != name { - fd = nil // reset since field name is actually the message name - } + fd = fieldDescs.ByTextName(string(name)) case text.TypeName: // Handle extensions only. This code path is not for Any. 
- xt, xtErr = d.findExtension(pref.FullName(tok.TypeName())) + xt, xtErr = d.opts.Resolver.FindExtensionByName(pref.FullName(tok.TypeName())) case text.FieldNumber: isFieldNumberName = true @@ -269,15 +258,6 @@ func (d decoder) unmarshalMessage(m pref.Message, checkDelims bool) error { return nil } -// findExtension returns protoreflect.ExtensionType from the Resolver if found. -func (d decoder) findExtension(xtName pref.FullName) (pref.ExtensionType, error) { - xt, err := d.opts.Resolver.FindExtensionByName(xtName) - if err == nil { - return xt, nil - } - return messageset.FindMessageSetExtension(d.opts.Resolver, xtName) -} - // unmarshalSingular unmarshals a non-repeated field value specified by the // given FieldDescriptor. func (d decoder) unmarshalSingular(fd pref.FieldDescriptor, m pref.Message) error { diff --git a/vendor/google.golang.org/protobuf/encoding/prototext/encode.go b/vendor/google.golang.org/protobuf/encoding/prototext/encode.go index 0877d71c519..8d5304dc5b3 100644 --- a/vendor/google.golang.org/protobuf/encoding/prototext/encode.go +++ b/vendor/google.golang.org/protobuf/encoding/prototext/encode.go @@ -6,7 +6,6 @@ package prototext import ( "fmt" - "sort" "strconv" "unicode/utf8" @@ -16,10 +15,11 @@ import ( "google.golang.org/protobuf/internal/errors" "google.golang.org/protobuf/internal/flags" "google.golang.org/protobuf/internal/genid" - "google.golang.org/protobuf/internal/mapsort" + "google.golang.org/protobuf/internal/order" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/internal/strs" "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" pref "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/reflect/protoregistry" ) @@ -169,35 +169,15 @@ func (e encoder) marshalMessage(m pref.Message, inclDelims bool) error { // If unable to expand, continue on to marshal Any as a regular message. } - // Marshal known fields. - fieldDescs := messageDesc.Fields() - size := fieldDescs.Len() - for i := 0; i < size; { - fd := fieldDescs.Get(i) - if od := fd.ContainingOneof(); od != nil { - fd = m.WhichOneof(od) - i += od.Fields().Len() - } else { - i++ - } - - if fd == nil || !m.Has(fd) { - continue - } - - name := fd.Name() - // Use type name for group field name. - if fd.Kind() == pref.GroupKind { - name = fd.Message().Name() - } - val := m.Get(fd) - if err := e.marshalField(string(name), val, fd); err != nil { - return err + // Marshal fields. + var err error + order.RangeFields(m, order.IndexNameFieldOrder, func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + if err = e.marshalField(fd.TextName(), v, fd); err != nil { + return false } - } - - // Marshal extensions. - if err := e.marshalExtensions(m); err != nil { + return true + }) + if err != nil { return err } @@ -290,7 +270,7 @@ func (e encoder) marshalList(name string, list pref.List, fd pref.FieldDescripto // marshalMap marshals the given protoreflect.Map as multiple name-value fields. func (e encoder) marshalMap(name string, mmap pref.Map, fd pref.FieldDescriptor) error { var err error - mapsort.Range(mmap, fd.MapKey().Kind(), func(key pref.MapKey, val pref.Value) bool { + order.RangeEntries(mmap, order.GenericKeyOrder, func(key pref.MapKey, val pref.Value) bool { e.WriteName(name) e.StartMessage() defer e.EndMessage() @@ -311,48 +291,6 @@ func (e encoder) marshalMap(name string, mmap pref.Map, fd pref.FieldDescriptor) return err } -// marshalExtensions marshals extension fields. 
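// The prototext encoder above now walks fields with
// order.RangeFields(m, order.IndexNameFieldOrder, ...) and map entries with
// order.RangeEntries, replacing the hand-rolled loops and sorting. A minimal
// sketch of the public entry point; the DescriptorProto literal is illustrative.
package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/prototext"
	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	m := &descriptorpb.DescriptorProto{
		Name: proto.String("Example"),
		Field: []*descriptorpb.FieldDescriptorProto{
			{Name: proto.String("id"), Number: proto.Int32(1)},
		},
	}
	out, err := prototext.MarshalOptions{Multiline: true, Indent: "  "}.Marshal(m)
	if err != nil {
		panic(err)
	}
	// Regular fields appear in declaration (index) order; extensions are sorted by name.
	fmt.Print(string(out))
}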
-func (e encoder) marshalExtensions(m pref.Message) error { - type entry struct { - key string - value pref.Value - desc pref.FieldDescriptor - } - - // Get a sorted list based on field key first. - var entries []entry - m.Range(func(fd pref.FieldDescriptor, v pref.Value) bool { - if !fd.IsExtension() { - return true - } - // For MessageSet extensions, the name used is the parent message. - name := fd.FullName() - if messageset.IsMessageSetExtension(fd) { - name = name.Parent() - } - entries = append(entries, entry{ - key: string(name), - value: v, - desc: fd, - }) - return true - }) - // Sort extensions lexicographically. - sort.Slice(entries, func(i, j int) bool { - return entries[i].key < entries[j].key - }) - - // Write out sorted list. - for _, entry := range entries { - // Extension field name is the proto field name enclosed in []. - name := "[" + entry.key + "]" - if err := e.marshalField(name, entry.value, entry.desc); err != nil { - return err - } - } - return nil -} - // marshalUnknown parses the given []byte and marshals fields out. // This function assumes proper encoding in the given []byte. func (e encoder) marshalUnknown(b []byte) { diff --git a/vendor/google.golang.org/protobuf/internal/detrand/rand.go b/vendor/google.golang.org/protobuf/internal/detrand/rand.go index a904dd1f91a..49c8676d484 100644 --- a/vendor/google.golang.org/protobuf/internal/detrand/rand.go +++ b/vendor/google.golang.org/protobuf/internal/detrand/rand.go @@ -26,6 +26,14 @@ func Bool() bool { return randSeed%2 == 1 } +// Intn returns a deterministically random integer between 0 and n-1, inclusive. +func Intn(n int) int { + if n <= 0 { + panic("must be positive") + } + return int(randSeed % uint64(n)) +} + // randSeed is a best-effort at an approximate hash of the Go binary. var randSeed = binaryHash() diff --git a/vendor/google.golang.org/protobuf/internal/encoding/messageset/messageset.go b/vendor/google.golang.org/protobuf/internal/encoding/messageset/messageset.go index b1eeea50797..453a81a5648 100644 --- a/vendor/google.golang.org/protobuf/internal/encoding/messageset/messageset.go +++ b/vendor/google.golang.org/protobuf/internal/encoding/messageset/messageset.go @@ -11,7 +11,6 @@ import ( "google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/internal/errors" pref "google.golang.org/protobuf/reflect/protoreflect" - preg "google.golang.org/protobuf/reflect/protoregistry" ) // The MessageSet wire format is equivalent to a message defiend as follows, @@ -48,33 +47,17 @@ func IsMessageSet(md pref.MessageDescriptor) bool { return ok && xmd.IsMessageSet() } -// IsMessageSetExtension reports this field extends a MessageSet. +// IsMessageSetExtension reports this field properly extends a MessageSet. func IsMessageSetExtension(fd pref.FieldDescriptor) bool { - if fd.Name() != ExtensionName { + switch { + case fd.Name() != ExtensionName: return false - } - if fd.FullName().Parent() != fd.Message().FullName() { + case !IsMessageSet(fd.ContainingMessage()): + return false + case fd.FullName().Parent() != fd.Message().FullName(): return false } - return IsMessageSet(fd.ContainingMessage()) -} - -// FindMessageSetExtension locates a MessageSet extension field by name. -// In text and JSON formats, the extension name used is the message itself. -// The extension field name is derived by appending ExtensionName. 
-func FindMessageSetExtension(r preg.ExtensionTypeResolver, s pref.FullName) (pref.ExtensionType, error) { - name := s.Append(ExtensionName) - xt, err := r.FindExtensionByName(name) - if err != nil { - if err == preg.NotFound { - return nil, err - } - return nil, errors.Wrap(err, "%q", name) - } - if !IsMessageSetExtension(xt.TypeDescriptor()) { - return nil, preg.NotFound - } - return xt, nil + return true } // SizeField returns the size of a MessageSet item field containing an extension diff --git a/vendor/google.golang.org/protobuf/internal/encoding/tag/tag.go b/vendor/google.golang.org/protobuf/internal/encoding/tag/tag.go index 16c02d7b627..38f1931c6fd 100644 --- a/vendor/google.golang.org/protobuf/internal/encoding/tag/tag.go +++ b/vendor/google.golang.org/protobuf/internal/encoding/tag/tag.go @@ -104,7 +104,7 @@ func Unmarshal(tag string, goType reflect.Type, evs pref.EnumValueDescriptors) p case strings.HasPrefix(s, "json="): jsonName := s[len("json="):] if jsonName != strs.JSONCamelCase(string(f.L0.FullName.Name())) { - f.L1.JSONName.Init(jsonName) + f.L1.StringName.InitJSON(jsonName) } case s == "packed": f.L1.HasPacked = true diff --git a/vendor/google.golang.org/protobuf/internal/fieldsort/fieldsort.go b/vendor/google.golang.org/protobuf/internal/fieldsort/fieldsort.go deleted file mode 100644 index 517c4e2a041..00000000000 --- a/vendor/google.golang.org/protobuf/internal/fieldsort/fieldsort.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package fieldsort defines an ordering of fields. -// -// The ordering defined by this package matches the historic behavior of the proto -// package, placing extensions first and oneofs last. -// -// There is no guarantee about stability of the wire encoding, and users should not -// depend on the order defined in this package as it is subject to change without -// notice. -package fieldsort - -import ( - "google.golang.org/protobuf/reflect/protoreflect" -) - -// Less returns true if field a comes before field j in ordered wire marshal output. -func Less(a, b protoreflect.FieldDescriptor) bool { - ea := a.IsExtension() - eb := b.IsExtension() - oa := a.ContainingOneof() - ob := b.ContainingOneof() - switch { - case ea != eb: - return ea - case oa != nil && ob != nil: - if oa == ob { - return a.Number() < b.Number() - } - return oa.Index() < ob.Index() - case oa != nil && !oa.IsSynthetic(): - return false - case ob != nil && !ob.IsSynthetic(): - return true - default: - return a.Number() < b.Number() - } -} diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/build.go b/vendor/google.golang.org/protobuf/internal/filedesc/build.go index d02d770c984..b293b694736 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/build.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/build.go @@ -3,6 +3,9 @@ // license that can be found in the LICENSE file. // Package filedesc provides functionality for constructing descriptors. +// +// The types in this package implement interfaces in the protoreflect package +// related to protobuf descripriptors. 
package filedesc import ( diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc.go index 9385126fba6..12f65f3782f 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/desc.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc.go @@ -13,6 +13,7 @@ import ( "google.golang.org/protobuf/internal/descfmt" "google.golang.org/protobuf/internal/descopts" "google.golang.org/protobuf/internal/encoding/defval" + "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/internal/strs" @@ -207,7 +208,7 @@ type ( Number pref.FieldNumber Cardinality pref.Cardinality // must be consistent with Message.RequiredNumbers Kind pref.Kind - JSONName jsonName + StringName stringName IsProto3Optional bool // promoted from google.protobuf.FieldDescriptorProto IsWeak bool // promoted from google.protobuf.FieldOptions HasPacked bool // promoted from google.protobuf.FieldOptions @@ -277,8 +278,9 @@ func (fd *Field) Options() pref.ProtoMessage { func (fd *Field) Number() pref.FieldNumber { return fd.L1.Number } func (fd *Field) Cardinality() pref.Cardinality { return fd.L1.Cardinality } func (fd *Field) Kind() pref.Kind { return fd.L1.Kind } -func (fd *Field) HasJSONName() bool { return fd.L1.JSONName.has } -func (fd *Field) JSONName() string { return fd.L1.JSONName.get(fd) } +func (fd *Field) HasJSONName() bool { return fd.L1.StringName.hasJSON } +func (fd *Field) JSONName() string { return fd.L1.StringName.getJSON(fd) } +func (fd *Field) TextName() string { return fd.L1.StringName.getText(fd) } func (fd *Field) HasPresence() bool { return fd.L1.Cardinality != pref.Repeated && (fd.L0.ParentFile.L1.Syntax == pref.Proto2 || fd.L1.Message != nil || fd.L1.ContainingOneof != nil) } @@ -373,7 +375,7 @@ type ( } ExtensionL2 struct { Options func() pref.ProtoMessage - JSONName jsonName + StringName stringName IsProto3Optional bool // promoted from google.protobuf.FieldDescriptorProto IsPacked bool // promoted from google.protobuf.FieldOptions Default defaultValue @@ -391,8 +393,9 @@ func (xd *Extension) Options() pref.ProtoMessage { func (xd *Extension) Number() pref.FieldNumber { return xd.L1.Number } func (xd *Extension) Cardinality() pref.Cardinality { return xd.L1.Cardinality } func (xd *Extension) Kind() pref.Kind { return xd.L1.Kind } -func (xd *Extension) HasJSONName() bool { return xd.lazyInit().JSONName.has } -func (xd *Extension) JSONName() string { return xd.lazyInit().JSONName.get(xd) } +func (xd *Extension) HasJSONName() bool { return xd.lazyInit().StringName.hasJSON } +func (xd *Extension) JSONName() string { return xd.lazyInit().StringName.getJSON(xd) } +func (xd *Extension) TextName() string { return xd.lazyInit().StringName.getText(xd) } func (xd *Extension) HasPresence() bool { return xd.L1.Cardinality != pref.Repeated } func (xd *Extension) HasOptionalKeyword() bool { return (xd.L0.ParentFile.L1.Syntax == pref.Proto2 && xd.L1.Cardinality == pref.Optional) || xd.lazyInit().IsProto3Optional @@ -506,27 +509,50 @@ func (d *Base) Syntax() pref.Syntax { return d.L0.ParentFile.Syn func (d *Base) IsPlaceholder() bool { return false } func (d *Base) ProtoInternal(pragma.DoNotImplement) {} -type jsonName struct { - has bool - once sync.Once - name string +type stringName struct { + hasJSON bool + once sync.Once + nameJSON string + nameText string } -// Init initializes the name. 
It is exported for use by other internal packages. -func (js *jsonName) Init(s string) { - js.has = true - js.name = s +// InitJSON initializes the name. It is exported for use by other internal packages. +func (s *stringName) InitJSON(name string) { + s.hasJSON = true + s.nameJSON = name } -func (js *jsonName) get(fd pref.FieldDescriptor) string { - if !js.has { - js.once.Do(func() { - js.name = strs.JSONCamelCase(string(fd.Name())) - }) - } - return js.name +func (s *stringName) lazyInit(fd pref.FieldDescriptor) *stringName { + s.once.Do(func() { + if fd.IsExtension() { + // For extensions, JSON and text are formatted the same way. + var name string + if messageset.IsMessageSetExtension(fd) { + name = string("[" + fd.FullName().Parent() + "]") + } else { + name = string("[" + fd.FullName() + "]") + } + s.nameJSON = name + s.nameText = name + } else { + // Format the JSON name. + if !s.hasJSON { + s.nameJSON = strs.JSONCamelCase(string(fd.Name())) + } + + // Format the text name. + s.nameText = string(fd.Name()) + if fd.Kind() == pref.GroupKind { + s.nameText = string(fd.Message().Name()) + } + } + }) + return s } +func (s *stringName) getJSON(fd pref.FieldDescriptor) string { return s.lazyInit(fd).nameJSON } +func (s *stringName) getText(fd pref.FieldDescriptor) string { return s.lazyInit(fd).nameText } + func DefaultValue(v pref.Value, ev pref.EnumValueDescriptor) defaultValue { dv := defaultValue{has: v.IsValid(), val: v, enum: ev} if b, ok := v.Interface().([]byte); ok { diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go index e672233e77e..198451e3ec9 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go @@ -451,7 +451,7 @@ func (fd *Field) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref.Des case genid.FieldDescriptorProto_Name_field_number: fd.L0.FullName = appendFullName(sb, pd.FullName(), v) case genid.FieldDescriptorProto_JsonName_field_number: - fd.L1.JSONName.Init(sb.MakeString(v)) + fd.L1.StringName.InitJSON(sb.MakeString(v)) case genid.FieldDescriptorProto_DefaultValue_field_number: fd.L1.Default.val = pref.ValueOfBytes(v) // temporarily store as bytes; later resolved in resolveMessages case genid.FieldDescriptorProto_TypeName_field_number: @@ -551,7 +551,7 @@ func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) { b = b[m:] switch num { case genid.FieldDescriptorProto_JsonName_field_number: - xd.L2.JSONName.Init(sb.MakeString(v)) + xd.L2.StringName.InitJSON(sb.MakeString(v)) case genid.FieldDescriptorProto_DefaultValue_field_number: xd.L2.Default.val = pref.ValueOfBytes(v) // temporarily store as bytes; later resolved in resolveExtensions case genid.FieldDescriptorProto_TypeName_field_number: diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go index c876cd34d70..aa294fff99a 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go @@ -6,9 +6,12 @@ package filedesc import ( "fmt" + "math" "sort" "sync" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/internal/descfmt" "google.golang.org/protobuf/internal/errors" @@ -245,6 +248,7 @@ type OneofFields struct { once sync.Once byName map[pref.Name]pref.FieldDescriptor // 
protected by once byJSON map[string]pref.FieldDescriptor // protected by once + byText map[string]pref.FieldDescriptor // protected by once byNum map[pref.FieldNumber]pref.FieldDescriptor // protected by once } @@ -252,6 +256,7 @@ func (p *OneofFields) Len() int { return func (p *OneofFields) Get(i int) pref.FieldDescriptor { return p.List[i] } func (p *OneofFields) ByName(s pref.Name) pref.FieldDescriptor { return p.lazyInit().byName[s] } func (p *OneofFields) ByJSONName(s string) pref.FieldDescriptor { return p.lazyInit().byJSON[s] } +func (p *OneofFields) ByTextName(s string) pref.FieldDescriptor { return p.lazyInit().byText[s] } func (p *OneofFields) ByNumber(n pref.FieldNumber) pref.FieldDescriptor { return p.lazyInit().byNum[n] } func (p *OneofFields) Format(s fmt.State, r rune) { descfmt.FormatList(s, r, p) } func (p *OneofFields) ProtoInternal(pragma.DoNotImplement) {} @@ -261,11 +266,13 @@ func (p *OneofFields) lazyInit() *OneofFields { if len(p.List) > 0 { p.byName = make(map[pref.Name]pref.FieldDescriptor, len(p.List)) p.byJSON = make(map[string]pref.FieldDescriptor, len(p.List)) + p.byText = make(map[string]pref.FieldDescriptor, len(p.List)) p.byNum = make(map[pref.FieldNumber]pref.FieldDescriptor, len(p.List)) for _, f := range p.List { // Field names and numbers are guaranteed to be unique. p.byName[f.Name()] = f p.byJSON[f.JSONName()] = f + p.byText[f.TextName()] = f p.byNum[f.Number()] = f } } @@ -274,9 +281,170 @@ func (p *OneofFields) lazyInit() *OneofFields { } type SourceLocations struct { + // List is a list of SourceLocations. + // The SourceLocation.Next field does not need to be populated + // as it will be lazily populated upon first need. List []pref.SourceLocation + + // File is the parent file descriptor that these locations are relative to. + // If non-nil, ByDescriptor verifies that the provided descriptor + // is a child of this file descriptor. + File pref.FileDescriptor + + once sync.Once + byPath map[pathKey]int +} + +func (p *SourceLocations) Len() int { return len(p.List) } +func (p *SourceLocations) Get(i int) pref.SourceLocation { return p.lazyInit().List[i] } +func (p *SourceLocations) byKey(k pathKey) pref.SourceLocation { + if i, ok := p.lazyInit().byPath[k]; ok { + return p.List[i] + } + return pref.SourceLocation{} +} +func (p *SourceLocations) ByPath(path pref.SourcePath) pref.SourceLocation { + return p.byKey(newPathKey(path)) +} +func (p *SourceLocations) ByDescriptor(desc pref.Descriptor) pref.SourceLocation { + if p.File != nil && desc != nil && p.File != desc.ParentFile() { + return pref.SourceLocation{} // mismatching parent files + } + var pathArr [16]int32 + path := pathArr[:0] + for { + switch desc.(type) { + case pref.FileDescriptor: + // Reverse the path since it was constructed in reverse. 
+ for i, j := 0, len(path)-1; i < j; i, j = i+1, j-1 { + path[i], path[j] = path[j], path[i] + } + return p.byKey(newPathKey(path)) + case pref.MessageDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.FileDescriptor: + path = append(path, int32(genid.FileDescriptorProto_MessageType_field_number)) + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_NestedType_field_number)) + default: + return pref.SourceLocation{} + } + case pref.FieldDescriptor: + isExtension := desc.(pref.FieldDescriptor).IsExtension() + path = append(path, int32(desc.Index())) + desc = desc.Parent() + if isExtension { + switch desc.(type) { + case pref.FileDescriptor: + path = append(path, int32(genid.FileDescriptorProto_Extension_field_number)) + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_Extension_field_number)) + default: + return pref.SourceLocation{} + } + } else { + switch desc.(type) { + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_Field_field_number)) + default: + return pref.SourceLocation{} + } + } + case pref.OneofDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_OneofDecl_field_number)) + default: + return pref.SourceLocation{} + } + case pref.EnumDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.FileDescriptor: + path = append(path, int32(genid.FileDescriptorProto_EnumType_field_number)) + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_EnumType_field_number)) + default: + return pref.SourceLocation{} + } + case pref.EnumValueDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.EnumDescriptor: + path = append(path, int32(genid.EnumDescriptorProto_Value_field_number)) + default: + return pref.SourceLocation{} + } + case pref.ServiceDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.FileDescriptor: + path = append(path, int32(genid.FileDescriptorProto_Service_field_number)) + default: + return pref.SourceLocation{} + } + case pref.MethodDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.ServiceDescriptor: + path = append(path, int32(genid.ServiceDescriptorProto_Method_field_number)) + default: + return pref.SourceLocation{} + } + default: + return pref.SourceLocation{} + } + } } +func (p *SourceLocations) lazyInit() *SourceLocations { + p.once.Do(func() { + if len(p.List) > 0 { + // Collect all the indexes for a given path. + pathIdxs := make(map[pathKey][]int, len(p.List)) + for i, l := range p.List { + k := newPathKey(l.Path) + pathIdxs[k] = append(pathIdxs[k], i) + } -func (p *SourceLocations) Len() int { return len(p.List) } -func (p *SourceLocations) Get(i int) pref.SourceLocation { return p.List[i] } + // Update the next index for all locations. 
+ p.byPath = make(map[pathKey]int, len(p.List)) + for k, idxs := range pathIdxs { + for i := 0; i < len(idxs)-1; i++ { + p.List[idxs[i]].Next = idxs[i+1] + } + p.List[idxs[len(idxs)-1]].Next = 0 + p.byPath[k] = idxs[0] // record the first location for this path + } + } + }) + return p +} func (p *SourceLocations) ProtoInternal(pragma.DoNotImplement) {} + +// pathKey is a comparable representation of protoreflect.SourcePath. +type pathKey struct { + arr [16]uint8 // first n-1 path segments; last element is the length + str string // used if the path does not fit in arr +} + +func newPathKey(p pref.SourcePath) (k pathKey) { + if len(p) < len(k.arr) { + for i, ps := range p { + if ps < 0 || math.MaxUint8 <= ps { + return pathKey{str: p.String()} + } + k.arr[i] = uint8(ps) + } + k.arr[len(k.arr)-1] = uint8(len(p)) + return k + } + return pathKey{str: p.String()} +} diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_list_gen.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list_gen.go index 6a8825e8027..30db19fdc75 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/desc_list_gen.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list_gen.go @@ -142,6 +142,7 @@ type Fields struct { once sync.Once byName map[protoreflect.Name]*Field // protected by once byJSON map[string]*Field // protected by once + byText map[string]*Field // protected by once byNum map[protoreflect.FieldNumber]*Field // protected by once } @@ -163,6 +164,12 @@ func (p *Fields) ByJSONName(s string) protoreflect.FieldDescriptor { } return nil } +func (p *Fields) ByTextName(s string) protoreflect.FieldDescriptor { + if d := p.lazyInit().byText[s]; d != nil { + return d + } + return nil +} func (p *Fields) ByNumber(n protoreflect.FieldNumber) protoreflect.FieldDescriptor { if d := p.lazyInit().byNum[n]; d != nil { return d @@ -178,6 +185,7 @@ func (p *Fields) lazyInit() *Fields { if len(p.List) > 0 { p.byName = make(map[protoreflect.Name]*Field, len(p.List)) p.byJSON = make(map[string]*Field, len(p.List)) + p.byText = make(map[string]*Field, len(p.List)) p.byNum = make(map[protoreflect.FieldNumber]*Field, len(p.List)) for i := range p.List { d := &p.List[i] @@ -187,6 +195,9 @@ func (p *Fields) lazyInit() *Fields { if _, ok := p.byJSON[d.JSONName()]; !ok { p.byJSON[d.JSONName()] = d } + if _, ok := p.byText[d.TextName()]; !ok { + p.byText[d.TextName()] = d + } if _, ok := p.byNum[d.Number()]; !ok { p.byNum[d.Number()] = d } diff --git a/vendor/google.golang.org/protobuf/internal/impl/api_export.go b/vendor/google.golang.org/protobuf/internal/impl/api_export.go index b5974528db6..abee5f30e9f 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/api_export.go +++ b/vendor/google.golang.org/protobuf/internal/impl/api_export.go @@ -167,7 +167,7 @@ func (Export) MessageTypeOf(m message) pref.MessageType { if mv := (Export{}).protoMessageV2Of(m); mv != nil { return mv.ProtoReflect().Type() } - return legacyLoadMessageInfo(reflect.TypeOf(m), "") + return legacyLoadMessageType(reflect.TypeOf(m), "") } // MessageStringOf returns the message value as a string, diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_field.go b/vendor/google.golang.org/protobuf/internal/impl/codec_field.go index c00744d385c..cb4b482d166 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/codec_field.go +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_field.go @@ -10,6 +10,7 @@ import ( "sync" "google.golang.org/protobuf/encoding/protowire" + 
"google.golang.org/protobuf/internal/errors" "google.golang.org/protobuf/proto" pref "google.golang.org/protobuf/reflect/protoreflect" preg "google.golang.org/protobuf/reflect/protoregistry" @@ -20,6 +21,7 @@ type errInvalidUTF8 struct{} func (errInvalidUTF8) Error() string { return "string field contains invalid UTF-8" } func (errInvalidUTF8) InvalidUTF8() bool { return true } +func (errInvalidUTF8) Unwrap() error { return errors.Error } // initOneofFieldCoders initializes the fast-path functions for the fields in a oneof. // @@ -242,7 +244,7 @@ func consumeMessageInfo(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if p.Elem().IsNil() { p.SetPointer(pointerOfValue(reflect.New(f.mi.GoReflectType.Elem()))) @@ -276,7 +278,7 @@ func consumeMessage(b []byte, m proto.Message, wtyp protowire.Type, opts unmarsh } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ Buf: v, @@ -420,7 +422,7 @@ func consumeGroup(b []byte, m proto.Message, num protowire.Number, wtyp protowir } b, n := protowire.ConsumeGroup(num, b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ Buf: b, @@ -494,7 +496,7 @@ func consumeMessageSliceInfo(b []byte, p pointer, wtyp protowire.Type, f *coderF } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } m := reflect.New(f.mi.GoReflectType.Elem()).Interface() mp := pointerOfIface(m) @@ -550,7 +552,7 @@ func consumeMessageSlice(b []byte, p pointer, goType reflect.Type, wtyp protowir } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } mp := reflect.New(goType.Elem()) o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ @@ -613,7 +615,7 @@ func consumeMessageSliceValue(b []byte, listv pref.Value, _ protowire.Number, wt } v, n := protowire.ConsumeBytes(b) if n < 0 { - return pref.Value{}, out, protowire.ParseError(n) + return pref.Value{}, out, errDecode } m := list.NewElement() o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ @@ -681,7 +683,7 @@ func consumeGroupSliceValue(b []byte, listv pref.Value, num protowire.Number, wt } b, n := protowire.ConsumeGroup(num, b) if n < 0 { - return pref.Value{}, out, protowire.ParseError(n) + return pref.Value{}, out, errDecode } m := list.NewElement() o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ @@ -767,7 +769,7 @@ func consumeGroupSlice(b []byte, p pointer, num protowire.Number, wtyp protowire } b, n := protowire.ConsumeGroup(num, b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } mp := reflect.New(goType.Elem()) o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_gen.go b/vendor/google.golang.org/protobuf/internal/impl/codec_gen.go index ff198d0a153..98aaf2d67b8 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/codec_gen.go +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_gen.go @@ -45,7 +45,7 @@ func consumeBool(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, _ v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Bool() = protowire.DecodeBool(v) out.n = n @@ -121,7 
+121,7 @@ func consumeBoolPtr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.BoolPtr() if *vp == nil { @@ -165,7 +165,7 @@ func consumeBoolSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { var v uint64 @@ -180,7 +180,7 @@ func consumeBoolSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, protowire.DecodeBool(v)) b = b[n:] @@ -204,7 +204,7 @@ func consumeBoolSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, protowire.DecodeBool(v)) out.n = n @@ -285,7 +285,7 @@ func consumeBoolValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp p v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfBool(protowire.DecodeBool(v)), out, nil @@ -325,7 +325,7 @@ func consumeBoolSliceValue(b []byte, listv protoreflect.Value, _ protowire.Numbe if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { var v uint64 @@ -340,7 +340,7 @@ func consumeBoolSliceValue(b []byte, listv protoreflect.Value, _ protowire.Numbe v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfBool(protowire.DecodeBool(v))) b = b[n:] @@ -363,7 +363,7 @@ func consumeBoolSliceValue(b []byte, listv protoreflect.Value, _ protowire.Numbe v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfBool(protowire.DecodeBool(v))) out.n = n @@ -449,7 +449,7 @@ func consumeEnumValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp p v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfEnum(protoreflect.EnumNumber(v)), out, nil @@ -489,7 +489,7 @@ func consumeEnumSliceValue(b []byte, listv protoreflect.Value, _ protowire.Numbe if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { var v uint64 @@ -504,7 +504,7 @@ func consumeEnumSliceValue(b []byte, listv protoreflect.Value, _ protowire.Numbe v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfEnum(protoreflect.EnumNumber(v))) b = b[n:] @@ -527,7 +527,7 @@ func consumeEnumSliceValue(b []byte, listv protoreflect.Value, _ protowire.Numbe v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, 
errDecode } list.Append(protoreflect.ValueOfEnum(protoreflect.EnumNumber(v))) out.n = n @@ -615,7 +615,7 @@ func consumeInt32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, _ v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Int32() = int32(v) out.n = n @@ -691,7 +691,7 @@ func consumeInt32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Int32Ptr() if *vp == nil { @@ -735,7 +735,7 @@ func consumeInt32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { var v uint64 @@ -750,7 +750,7 @@ func consumeInt32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, int32(v)) b = b[n:] @@ -774,7 +774,7 @@ func consumeInt32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, int32(v)) out.n = n @@ -855,7 +855,7 @@ func consumeInt32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfInt32(int32(v)), out, nil @@ -895,7 +895,7 @@ func consumeInt32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Numb if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { var v uint64 @@ -910,7 +910,7 @@ func consumeInt32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Numb v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt32(int32(v))) b = b[n:] @@ -933,7 +933,7 @@ func consumeInt32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Numb v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt32(int32(v))) out.n = n @@ -1021,7 +1021,7 @@ func consumeSint32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Int32() = int32(protowire.DecodeZigZag(v & math.MaxUint32)) out.n = n @@ -1097,7 +1097,7 @@ func consumeSint32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Int32Ptr() if *vp == nil { @@ -1141,7 +1141,7 @@ func consumeSint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { var v uint64 @@ -1156,7 +1156,7 @@ func consumeSint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, 
protowire.ParseError(n) + return out, errDecode } s = append(s, int32(protowire.DecodeZigZag(v&math.MaxUint32))) b = b[n:] @@ -1180,7 +1180,7 @@ func consumeSint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, int32(protowire.DecodeZigZag(v&math.MaxUint32))) out.n = n @@ -1261,7 +1261,7 @@ func consumeSint32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32))), out, nil @@ -1301,7 +1301,7 @@ func consumeSint32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { var v uint64 @@ -1316,7 +1316,7 @@ func consumeSint32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32)))) b = b[n:] @@ -1339,7 +1339,7 @@ func consumeSint32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32)))) out.n = n @@ -1427,7 +1427,7 @@ func consumeUint32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Uint32() = uint32(v) out.n = n @@ -1503,7 +1503,7 @@ func consumeUint32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Uint32Ptr() if *vp == nil { @@ -1547,7 +1547,7 @@ func consumeUint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { var v uint64 @@ -1562,7 +1562,7 @@ func consumeUint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, uint32(v)) b = b[n:] @@ -1586,7 +1586,7 @@ func consumeUint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, uint32(v)) out.n = n @@ -1667,7 +1667,7 @@ func consumeUint32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfUint32(uint32(v)), out, nil @@ -1707,7 +1707,7 @@ func consumeUint32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) 
if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { var v uint64 @@ -1722,7 +1722,7 @@ func consumeUint32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfUint32(uint32(v))) b = b[n:] @@ -1745,7 +1745,7 @@ func consumeUint32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfUint32(uint32(v))) out.n = n @@ -1833,7 +1833,7 @@ func consumeInt64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, _ v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Int64() = int64(v) out.n = n @@ -1909,7 +1909,7 @@ func consumeInt64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Int64Ptr() if *vp == nil { @@ -1953,7 +1953,7 @@ func consumeInt64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { var v uint64 @@ -1968,7 +1968,7 @@ func consumeInt64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, int64(v)) b = b[n:] @@ -1992,7 +1992,7 @@ func consumeInt64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, int64(v)) out.n = n @@ -2073,7 +2073,7 @@ func consumeInt64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfInt64(int64(v)), out, nil @@ -2113,7 +2113,7 @@ func consumeInt64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Numb if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { var v uint64 @@ -2128,7 +2128,7 @@ func consumeInt64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Numb v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt64(int64(v))) b = b[n:] @@ -2151,7 +2151,7 @@ func consumeInt64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Numb v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt64(int64(v))) out.n = n @@ -2239,7 +2239,7 @@ func consumeSint64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Int64() = 
protowire.DecodeZigZag(v) out.n = n @@ -2315,7 +2315,7 @@ func consumeSint64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Int64Ptr() if *vp == nil { @@ -2359,7 +2359,7 @@ func consumeSint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { var v uint64 @@ -2374,7 +2374,7 @@ func consumeSint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, protowire.DecodeZigZag(v)) b = b[n:] @@ -2398,7 +2398,7 @@ func consumeSint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, protowire.DecodeZigZag(v)) out.n = n @@ -2479,7 +2479,7 @@ func consumeSint64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfInt64(protowire.DecodeZigZag(v)), out, nil @@ -2519,7 +2519,7 @@ func consumeSint64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { var v uint64 @@ -2534,7 +2534,7 @@ func consumeSint64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt64(protowire.DecodeZigZag(v))) b = b[n:] @@ -2557,7 +2557,7 @@ func consumeSint64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt64(protowire.DecodeZigZag(v))) out.n = n @@ -2645,7 +2645,7 @@ func consumeUint64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Uint64() = v out.n = n @@ -2721,7 +2721,7 @@ func consumeUint64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Uint64Ptr() if *vp == nil { @@ -2765,7 +2765,7 @@ func consumeUint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { var v uint64 @@ -2780,7 +2780,7 @@ func consumeUint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, v) b = b[n:] @@ -2804,7 +2804,7 @@ func consumeUint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI v, n = protowire.ConsumeVarint(b) } if n < 0 { - return out, 
protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, v) out.n = n @@ -2885,7 +2885,7 @@ func consumeUint64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfUint64(v), out, nil @@ -2925,7 +2925,7 @@ func consumeUint64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { var v uint64 @@ -2940,7 +2940,7 @@ func consumeUint64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfUint64(v)) b = b[n:] @@ -2963,7 +2963,7 @@ func consumeUint64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Num v, n = protowire.ConsumeVarint(b) } if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfUint64(v)) out.n = n @@ -3041,7 +3041,7 @@ func consumeSfixed32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Int32() = int32(v) out.n = n @@ -3106,7 +3106,7 @@ func consumeSfixed32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Int32Ptr() if *vp == nil { @@ -3148,12 +3148,12 @@ func consumeSfixed32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFiel s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, int32(v)) b = b[n:] @@ -3167,7 +3167,7 @@ func consumeSfixed32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFiel } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, int32(v)) out.n = n @@ -3232,7 +3232,7 @@ func consumeSfixed32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wt } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfInt32(int32(v)), out, nil @@ -3269,12 +3269,12 @@ func consumeSfixed32SliceValue(b []byte, listv protoreflect.Value, _ protowire.N if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed32(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt32(int32(v))) b = b[n:] @@ -3287,7 +3287,7 @@ func consumeSfixed32SliceValue(b []byte, listv protoreflect.Value, _ protowire.N } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return 
protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt32(int32(v))) out.n = n @@ -3357,7 +3357,7 @@ func consumeFixed32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Uint32() = v out.n = n @@ -3422,7 +3422,7 @@ func consumeFixed32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Uint32Ptr() if *vp == nil { @@ -3464,12 +3464,12 @@ func consumeFixed32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderField s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, v) b = b[n:] @@ -3483,7 +3483,7 @@ func consumeFixed32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderField } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, v) out.n = n @@ -3548,7 +3548,7 @@ func consumeFixed32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wty } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfUint32(uint32(v)), out, nil @@ -3585,12 +3585,12 @@ func consumeFixed32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Nu if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed32(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfUint32(uint32(v))) b = b[n:] @@ -3603,7 +3603,7 @@ func consumeFixed32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Nu } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfUint32(uint32(v))) out.n = n @@ -3673,7 +3673,7 @@ func consumeFloat(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, _ } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Float32() = math.Float32frombits(v) out.n = n @@ -3738,7 +3738,7 @@ func consumeFloatPtr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Float32Ptr() if *vp == nil { @@ -3780,12 +3780,12 @@ func consumeFloatSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, math.Float32frombits(v)) b = b[n:] @@ -3799,7 +3799,7 @@ func consumeFloatSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, 
math.Float32frombits(v)) out.n = n @@ -3864,7 +3864,7 @@ func consumeFloatValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v))), out, nil @@ -3901,12 +3901,12 @@ func consumeFloatSliceValue(b []byte, listv protoreflect.Value, _ protowire.Numb if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed32(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v)))) b = b[n:] @@ -3919,7 +3919,7 @@ func consumeFloatSliceValue(b []byte, listv protoreflect.Value, _ protowire.Numb } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v)))) out.n = n @@ -3989,7 +3989,7 @@ func consumeSfixed64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Int64() = int64(v) out.n = n @@ -4054,7 +4054,7 @@ func consumeSfixed64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Int64Ptr() if *vp == nil { @@ -4096,12 +4096,12 @@ func consumeSfixed64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFiel s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, int64(v)) b = b[n:] @@ -4115,7 +4115,7 @@ func consumeSfixed64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFiel } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, int64(v)) out.n = n @@ -4180,7 +4180,7 @@ func consumeSfixed64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wt } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfInt64(int64(v)), out, nil @@ -4217,12 +4217,12 @@ func consumeSfixed64SliceValue(b []byte, listv protoreflect.Value, _ protowire.N if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed64(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfInt64(int64(v))) b = b[n:] @@ -4235,7 +4235,7 @@ func consumeSfixed64SliceValue(b []byte, listv protoreflect.Value, _ protowire.N } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } 
list.Append(protoreflect.ValueOfInt64(int64(v))) out.n = n @@ -4305,7 +4305,7 @@ func consumeFixed64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Uint64() = v out.n = n @@ -4370,7 +4370,7 @@ func consumeFixed64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Uint64Ptr() if *vp == nil { @@ -4412,12 +4412,12 @@ func consumeFixed64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderField s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, v) b = b[n:] @@ -4431,7 +4431,7 @@ func consumeFixed64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderField } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, v) out.n = n @@ -4496,7 +4496,7 @@ func consumeFixed64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wty } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfUint64(v), out, nil @@ -4533,12 +4533,12 @@ func consumeFixed64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Nu if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed64(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfUint64(v)) b = b[n:] @@ -4551,7 +4551,7 @@ func consumeFixed64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Nu } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfUint64(v)) out.n = n @@ -4621,7 +4621,7 @@ func consumeDouble(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Float64() = math.Float64frombits(v) out.n = n @@ -4686,7 +4686,7 @@ func consumeDoublePtr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.Float64Ptr() if *vp == nil { @@ -4728,12 +4728,12 @@ func consumeDoubleSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI s := *sp b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } s = append(s, math.Float64frombits(v)) b = b[n:] @@ -4747,7 +4747,7 @@ func consumeDoubleSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, math.Float64frombits(v)) out.n = n @@ -4812,7 +4812,7 @@ func 
consumeDoubleValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfFloat64(math.Float64frombits(v)), out, nil @@ -4849,12 +4849,12 @@ func consumeDoubleSliceValue(b []byte, listv protoreflect.Value, _ protowire.Num if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeFixed64(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfFloat64(math.Float64frombits(v))) b = b[n:] @@ -4867,7 +4867,7 @@ func consumeDoubleSliceValue(b []byte, listv protoreflect.Value, _ protowire.Num } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfFloat64(math.Float64frombits(v))) out.n = n @@ -4937,7 +4937,7 @@ func consumeString(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, } v, n := protowire.ConsumeString(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.String() = v out.n = n @@ -4969,7 +4969,7 @@ func consumeStringValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f *code } v, n := protowire.ConsumeString(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if !utf8.ValidString(v) { return out, errInvalidUTF8{} @@ -5060,7 +5060,7 @@ func consumeStringPtr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf } v, n := protowire.ConsumeString(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } vp := p.StringPtr() if *vp == nil { @@ -5097,7 +5097,7 @@ func consumeStringPtrValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f *c } v, n := protowire.ConsumeString(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if !utf8.ValidString(v) { return out, errInvalidUTF8{} @@ -5145,7 +5145,7 @@ func consumeStringSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI } v, n := protowire.ConsumeString(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, v) out.n = n @@ -5180,7 +5180,7 @@ func consumeStringSliceValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f } v, n := protowire.ConsumeString(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if !utf8.ValidString(v) { return out, errInvalidUTF8{} @@ -5216,7 +5216,7 @@ func consumeStringValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp } v, n := protowire.ConsumeString(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfString(string(v)), out, nil @@ -5246,7 +5246,7 @@ func consumeStringValueValidateUTF8(b []byte, _ protoreflect.Value, _ protowire. 
} v, n := protowire.ConsumeString(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } if !utf8.ValidString(v) { return protoreflect.Value{}, out, errInvalidUTF8{} @@ -5291,7 +5291,7 @@ func consumeStringSliceValue(b []byte, listv protoreflect.Value, _ protowire.Num } v, n := protowire.ConsumeString(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfString(string(v))) out.n = n @@ -5326,7 +5326,7 @@ func consumeBytes(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, _ } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Bytes() = append(emptyBuf[:], v...) out.n = n @@ -5358,7 +5358,7 @@ func consumeBytesValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f *coder } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if !utf8.Valid(v) { return out, errInvalidUTF8{} @@ -5405,7 +5405,7 @@ func consumeBytesNoZero(b []byte, p pointer, wtyp protowire.Type, f *coderFieldI } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *p.Bytes() = append(([]byte)(nil), v...) out.n = n @@ -5441,7 +5441,7 @@ func consumeBytesNoZeroValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if !utf8.Valid(v) { return out, errInvalidUTF8{} @@ -5485,7 +5485,7 @@ func consumeBytesSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldIn } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } *sp = append(*sp, append(emptyBuf[:], v...)) out.n = n @@ -5520,7 +5520,7 @@ func consumeBytesSliceValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f * } v, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if !utf8.Valid(v) { return out, errInvalidUTF8{} @@ -5556,7 +5556,7 @@ func consumeBytesValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp } v, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } out.n = n return protoreflect.ValueOfBytes(append(emptyBuf[:], v...)), out, nil @@ -5598,7 +5598,7 @@ func consumeBytesSliceValue(b []byte, listv protoreflect.Value, _ protowire.Numb } v, n := protowire.ConsumeBytes(b) if n < 0 { - return protoreflect.Value{}, out, protowire.ParseError(n) + return protoreflect.Value{}, out, errDecode } list.Append(protoreflect.ValueOfBytes(append(emptyBuf[:], v...))) out.n = n diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_map.go b/vendor/google.golang.org/protobuf/internal/impl/codec_map.go index 44885a761f6..c1245fef487 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/codec_map.go +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_map.go @@ -5,7 +5,6 @@ package impl import ( - "errors" "reflect" "sort" @@ -118,7 +117,7 @@ func consumeMap(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi *mapInfo } b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } var ( key = mapi.keyZero @@ -127,10 +126,10 @@ func consumeMap(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi *mapInfo for len(b) > 0 { num, wtyp, n 
:= protowire.ConsumeTag(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if num > protowire.MaxValidNumber { - return out, errors.New("invalid field number") + return out, errDecode } b = b[n:] err := errUnknown @@ -157,7 +156,7 @@ func consumeMap(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi *mapInfo if err == errUnknown { n = protowire.ConsumeFieldValue(num, wtyp, b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } } else if err != nil { return out, err @@ -175,7 +174,7 @@ func consumeMapOfMessage(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi } b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } var ( key = mapi.keyZero @@ -184,10 +183,10 @@ func consumeMapOfMessage(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi for len(b) > 0 { num, wtyp, n := protowire.ConsumeTag(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if num > protowire.MaxValidNumber { - return out, errors.New("invalid field number") + return out, errDecode } b = b[n:] err := errUnknown @@ -208,7 +207,7 @@ func consumeMapOfMessage(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi var v []byte v, n = protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } var o unmarshalOutput o, err = f.mi.unmarshalPointer(v, pointerOfValue(val), 0, opts) @@ -221,7 +220,7 @@ func consumeMapOfMessage(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi if err == errUnknown { n = protowire.ConsumeFieldValue(num, wtyp, b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } } else if err != nil { return out, err diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_message.go b/vendor/google.golang.org/protobuf/internal/impl/codec_message.go index 0e176d565d4..e10864274e6 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/codec_message.go +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_message.go @@ -11,7 +11,7 @@ import ( "google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/internal/encoding/messageset" - "google.golang.org/protobuf/internal/fieldsort" + "google.golang.org/protobuf/internal/order" pref "google.golang.org/protobuf/reflect/protoreflect" piface "google.golang.org/protobuf/runtime/protoiface" ) @@ -27,6 +27,7 @@ type coderMessageInfo struct { coderFields map[protowire.Number]*coderFieldInfo sizecacheOffset offset unknownOffset offset + unknownPtrKind bool extensionOffset offset needsInitCheck bool isMessageSet bool @@ -47,9 +48,20 @@ type coderFieldInfo struct { } func (mi *MessageInfo) makeCoderMethods(t reflect.Type, si structInfo) { - mi.sizecacheOffset = si.sizecacheOffset - mi.unknownOffset = si.unknownOffset - mi.extensionOffset = si.extensionOffset + mi.sizecacheOffset = invalidOffset + mi.unknownOffset = invalidOffset + mi.extensionOffset = invalidOffset + + if si.sizecacheOffset.IsValid() && si.sizecacheType == sizecacheType { + mi.sizecacheOffset = si.sizecacheOffset + } + if si.unknownOffset.IsValid() && (si.unknownType == unknownFieldsAType || si.unknownType == unknownFieldsBType) { + mi.unknownOffset = si.unknownOffset + mi.unknownPtrKind = si.unknownType.Kind() == reflect.Ptr + } + if si.extensionOffset.IsValid() && si.extensionType == extensionFieldsType { + mi.extensionOffset = si.extensionOffset + } mi.coderFields = make(map[protowire.Number]*coderFieldInfo) fields := mi.Desc.Fields() @@ -136,7 +148,7 @@ func (mi 
*MessageInfo) makeCoderMethods(t reflect.Type, si structInfo) { sort.Slice(mi.orderedCoderFields, func(i, j int) bool { fi := fields.ByNumber(mi.orderedCoderFields[i].num) fj := fields.ByNumber(mi.orderedCoderFields[j].num) - return fieldsort.Less(fi, fj) + return order.LegacyFieldOrder(fi, fj) }) } @@ -157,3 +169,28 @@ func (mi *MessageInfo) makeCoderMethods(t reflect.Type, si structInfo) { mi.methods.Merge = mi.merge } } + +// getUnknownBytes returns a *[]byte for the unknown fields. +// It is the caller's responsibility to check whether the pointer is nil. +// This function is specially designed to be inlineable. +func (mi *MessageInfo) getUnknownBytes(p pointer) *[]byte { + if mi.unknownPtrKind { + return *p.Apply(mi.unknownOffset).BytesPtr() + } else { + return p.Apply(mi.unknownOffset).Bytes() + } +} + +// mutableUnknownBytes returns a *[]byte for the unknown fields. +// The returned pointer is guaranteed to not be nil. +func (mi *MessageInfo) mutableUnknownBytes(p pointer) *[]byte { + if mi.unknownPtrKind { + bp := p.Apply(mi.unknownOffset).BytesPtr() + if *bp == nil { + *bp = new([]byte) + } + return *bp + } else { + return p.Apply(mi.unknownOffset).Bytes() + } +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_messageset.go b/vendor/google.golang.org/protobuf/internal/impl/codec_messageset.go index cfb68e12fbf..b7a23faf1e4 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/codec_messageset.go +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_messageset.go @@ -29,8 +29,9 @@ func sizeMessageSet(mi *MessageInfo, p pointer, opts marshalOptions) (size int) size += xi.funcs.size(x.Value(), protowire.SizeTag(messageset.FieldMessage), opts) } - unknown := *p.Apply(mi.unknownOffset).Bytes() - size += messageset.SizeUnknown(unknown) + if u := mi.getUnknownBytes(p); u != nil { + size += messageset.SizeUnknown(*u) + } return size } @@ -69,10 +70,12 @@ func marshalMessageSet(mi *MessageInfo, b []byte, p pointer, opts marshalOptions } } - unknown := *p.Apply(mi.unknownOffset).Bytes() - b, err := messageset.AppendUnknown(b, unknown) - if err != nil { - return b, err + if u := mi.getUnknownBytes(p); u != nil { + var err error + b, err = messageset.AppendUnknown(b, *u) + if err != nil { + return b, err + } } return b, nil @@ -100,13 +103,13 @@ func unmarshalMessageSet(mi *MessageInfo, b []byte, p pointer, opts unmarshalOpt *ep = make(map[int32]ExtensionField) } ext := *ep - unknown := p.Apply(mi.unknownOffset).Bytes() initialized := true err = messageset.Unmarshal(b, true, func(num protowire.Number, v []byte) error { o, err := mi.unmarshalExtension(v, num, protowire.BytesType, ext, opts) if err == errUnknown { - *unknown = protowire.AppendTag(*unknown, num, protowire.BytesType) - *unknown = append(*unknown, v...) + u := mi.mutableUnknownBytes(p) + *u = protowire.AppendTag(*u, num, protowire.BytesType) + *u = append(*u, v...) 
return nil } if !o.initialized { diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_reflect.go b/vendor/google.golang.org/protobuf/internal/impl/codec_reflect.go index 86f7dc3c9d7..90705e3aea7 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/codec_reflect.go +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_reflect.go @@ -30,7 +30,7 @@ func consumeEnum(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, _ } v, n := protowire.ConsumeVarint(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } p.v.Elem().SetInt(int64(v)) out.n = n @@ -130,12 +130,12 @@ func consumeEnumSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf if wtyp == protowire.BytesType { b, n := protowire.ConsumeBytes(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } for len(b) > 0 { v, n := protowire.ConsumeVarint(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } rv := reflect.New(s.Type().Elem()).Elem() rv.SetInt(int64(v)) @@ -150,7 +150,7 @@ func consumeEnumSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInf } v, n := protowire.ConsumeVarint(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } rv := reflect.New(s.Type().Elem()).Elem() rv.SetInt(int64(v)) diff --git a/vendor/google.golang.org/protobuf/internal/impl/decode.go b/vendor/google.golang.org/protobuf/internal/impl/decode.go index 85ba1d3b334..949dc49a65b 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/decode.go +++ b/vendor/google.golang.org/protobuf/internal/impl/decode.go @@ -17,6 +17,8 @@ import ( piface "google.golang.org/protobuf/runtime/protoiface" ) +var errDecode = errors.New("cannot parse invalid wire-format data") + type unmarshalOptions struct { flags protoiface.UnmarshalInputFlags resolver interface { @@ -100,13 +102,13 @@ func (mi *MessageInfo) unmarshalPointer(b []byte, p pointer, groupTag protowire. var n int tag, n = protowire.ConsumeVarint(b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } b = b[n:] } var num protowire.Number if n := tag >> 3; n < uint64(protowire.MinValidNumber) || n > uint64(protowire.MaxValidNumber) { - return out, errors.New("invalid field number") + return out, errDecode } else { num = protowire.Number(n) } @@ -114,7 +116,7 @@ func (mi *MessageInfo) unmarshalPointer(b []byte, p pointer, groupTag protowire. if wtyp == protowire.EndGroupType { if num != groupTag { - return out, errors.New("mismatching end group marker") + return out, errDecode } groupTag = 0 break @@ -170,10 +172,10 @@ func (mi *MessageInfo) unmarshalPointer(b []byte, p pointer, groupTag protowire. } n = protowire.ConsumeFieldValue(num, wtyp, b) if n < 0 { - return out, protowire.ParseError(n) + return out, errDecode } if !opts.DiscardUnknown() && mi.unknownOffset.IsValid() { - u := p.Apply(mi.unknownOffset).Bytes() + u := mi.mutableUnknownBytes(p) *u = protowire.AppendTag(*u, num, wtyp) *u = append(*u, b[:n]...) } @@ -181,7 +183,7 @@ func (mi *MessageInfo) unmarshalPointer(b []byte, p pointer, groupTag protowire. 
b = b[n:] } if groupTag != 0 { - return out, errors.New("missing end group marker") + return out, errDecode } if mi.numRequiredFields > 0 && bits.OnesCount64(requiredMask) != int(mi.numRequiredFields) { initialized = false @@ -221,7 +223,7 @@ func (mi *MessageInfo) unmarshalExtension(b []byte, num protowire.Number, wtyp p return out, nil } case ValidationInvalid: - return out, errors.New("invalid wire format") + return out, errDecode case ValidationUnknown: } } diff --git a/vendor/google.golang.org/protobuf/internal/impl/encode.go b/vendor/google.golang.org/protobuf/internal/impl/encode.go index 8c8a794c631..845c67d6e7e 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/encode.go +++ b/vendor/google.golang.org/protobuf/internal/impl/encode.go @@ -79,8 +79,9 @@ func (mi *MessageInfo) sizePointerSlow(p pointer, opts marshalOptions) (size int size += f.funcs.size(fptr, f, opts) } if mi.unknownOffset.IsValid() { - u := *p.Apply(mi.unknownOffset).Bytes() - size += len(u) + if u := mi.getUnknownBytes(p); u != nil { + size += len(*u) + } } if mi.sizecacheOffset.IsValid() { if size > math.MaxInt32 { @@ -141,8 +142,9 @@ func (mi *MessageInfo) marshalAppendPointer(b []byte, p pointer, opts marshalOpt } } if mi.unknownOffset.IsValid() && !mi.isMessageSet { - u := *p.Apply(mi.unknownOffset).Bytes() - b = append(b, u...) + if u := mi.getUnknownBytes(p); u != nil { + b = append(b, (*u)...) + } } return b, nil } diff --git a/vendor/google.golang.org/protobuf/internal/impl/legacy_export.go b/vendor/google.golang.org/protobuf/internal/impl/legacy_export.go index c3d741c2f0c..e3fb0b57858 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/legacy_export.go +++ b/vendor/google.golang.org/protobuf/internal/impl/legacy_export.go @@ -30,7 +30,7 @@ func (Export) LegacyMessageTypeOf(m piface.MessageV1, name pref.FullName) pref.M if mv := (Export{}).protoMessageV2Of(m); mv != nil { return mv.ProtoReflect().Type() } - return legacyLoadMessageInfo(reflect.TypeOf(m), name) + return legacyLoadMessageType(reflect.TypeOf(m), name) } // UnmarshalJSONEnum unmarshals an enum from a JSON-encoded input. 
diff --git a/vendor/google.golang.org/protobuf/internal/impl/legacy_extension.go b/vendor/google.golang.org/protobuf/internal/impl/legacy_extension.go index 61757ce50a7..49e723161c0 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/legacy_extension.go +++ b/vendor/google.golang.org/protobuf/internal/impl/legacy_extension.go @@ -154,7 +154,8 @@ func (x placeholderExtension) Number() pref.FieldNumber { retu func (x placeholderExtension) Cardinality() pref.Cardinality { return 0 } func (x placeholderExtension) Kind() pref.Kind { return 0 } func (x placeholderExtension) HasJSONName() bool { return false } -func (x placeholderExtension) JSONName() string { return "" } +func (x placeholderExtension) JSONName() string { return "[" + string(x.name) + "]" } +func (x placeholderExtension) TextName() string { return "[" + string(x.name) + "]" } func (x placeholderExtension) HasPresence() bool { return false } func (x placeholderExtension) HasOptionalKeyword() bool { return false } func (x placeholderExtension) IsExtension() bool { return true } diff --git a/vendor/google.golang.org/protobuf/internal/impl/legacy_message.go b/vendor/google.golang.org/protobuf/internal/impl/legacy_message.go index 06c68e11702..b1d66c5c548 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/legacy_message.go +++ b/vendor/google.golang.org/protobuf/internal/impl/legacy_message.go @@ -32,6 +32,16 @@ func legacyWrapMessage(v reflect.Value) pref.Message { return mt.MessageOf(v.Interface()) } +// legacyLoadMessageType dynamically loads a protoreflect.Type for t, +// where t must be not implement the v2 API already. +// The provided name is used if it cannot be determined from the message. +func legacyLoadMessageType(t reflect.Type, name pref.FullName) protoreflect.MessageType { + if t.Kind() != reflect.Ptr || t.Elem().Kind() != reflect.Struct { + return aberrantMessageType{t} + } + return legacyLoadMessageInfo(t, name) +} + var legacyMessageTypeCache sync.Map // map[reflect.Type]*MessageInfo // legacyLoadMessageInfo dynamically loads a *MessageInfo for t, @@ -370,7 +380,7 @@ type legacyMerger interface { Merge(protoiface.MessageV1) } -var legacyProtoMethods = &piface.Methods{ +var aberrantProtoMethods = &piface.Methods{ Marshal: legacyMarshal, Unmarshal: legacyUnmarshal, Merge: legacyMerge, @@ -495,7 +505,7 @@ func (m aberrantMessage) IsValid() bool { return true } func (m aberrantMessage) ProtoMethods() *piface.Methods { - return legacyProtoMethods + return aberrantProtoMethods } func (m aberrantMessage) protoUnwrap() interface{} { return m.v.Interface() diff --git a/vendor/google.golang.org/protobuf/internal/impl/merge.go b/vendor/google.golang.org/protobuf/internal/impl/merge.go index cdc4267dfad..c65bbc0446e 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/merge.go +++ b/vendor/google.golang.org/protobuf/internal/impl/merge.go @@ -77,9 +77,9 @@ func (mi *MessageInfo) mergePointer(dst, src pointer, opts mergeOptions) { } } if mi.unknownOffset.IsValid() { - du := dst.Apply(mi.unknownOffset).Bytes() - su := src.Apply(mi.unknownOffset).Bytes() - if len(*su) > 0 { + su := mi.getUnknownBytes(src) + if su != nil && len(*su) > 0 { + du := mi.mutableUnknownBytes(dst) *du = append(*du, *su...) 
} } diff --git a/vendor/google.golang.org/protobuf/internal/impl/message.go b/vendor/google.golang.org/protobuf/internal/impl/message.go index c026a98180d..a104e28e858 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/message.go +++ b/vendor/google.golang.org/protobuf/internal/impl/message.go @@ -15,6 +15,7 @@ import ( "google.golang.org/protobuf/internal/genid" "google.golang.org/protobuf/reflect/protoreflect" pref "google.golang.org/protobuf/reflect/protoreflect" + preg "google.golang.org/protobuf/reflect/protoregistry" ) // MessageInfo provides protobuf related functionality for a given Go type @@ -109,22 +110,29 @@ func (mi *MessageInfo) getPointer(m pref.Message) (p pointer, ok bool) { type ( SizeCache = int32 WeakFields = map[int32]protoreflect.ProtoMessage - UnknownFields = []byte + UnknownFields = unknownFieldsA // TODO: switch to unknownFieldsB + unknownFieldsA = []byte + unknownFieldsB = *[]byte ExtensionFields = map[int32]ExtensionField ) var ( sizecacheType = reflect.TypeOf(SizeCache(0)) weakFieldsType = reflect.TypeOf(WeakFields(nil)) - unknownFieldsType = reflect.TypeOf(UnknownFields(nil)) + unknownFieldsAType = reflect.TypeOf(unknownFieldsA(nil)) + unknownFieldsBType = reflect.TypeOf(unknownFieldsB(nil)) extensionFieldsType = reflect.TypeOf(ExtensionFields(nil)) ) type structInfo struct { sizecacheOffset offset + sizecacheType reflect.Type weakOffset offset + weakType reflect.Type unknownOffset offset + unknownType reflect.Type extensionOffset offset + extensionType reflect.Type fieldsByNumber map[pref.FieldNumber]reflect.StructField oneofsByName map[pref.Name]reflect.StructField @@ -151,18 +159,22 @@ fieldLoop: case genid.SizeCache_goname, genid.SizeCacheA_goname: if f.Type == sizecacheType { si.sizecacheOffset = offsetOf(f, mi.Exporter) + si.sizecacheType = f.Type } case genid.WeakFields_goname, genid.WeakFieldsA_goname: if f.Type == weakFieldsType { si.weakOffset = offsetOf(f, mi.Exporter) + si.weakType = f.Type } case genid.UnknownFields_goname, genid.UnknownFieldsA_goname: - if f.Type == unknownFieldsType { + if f.Type == unknownFieldsAType || f.Type == unknownFieldsBType { si.unknownOffset = offsetOf(f, mi.Exporter) + si.unknownType = f.Type } case genid.ExtensionFields_goname, genid.ExtensionFieldsA_goname, genid.ExtensionFieldsB_goname: if f.Type == extensionFieldsType { si.extensionOffset = offsetOf(f, mi.Exporter) + si.extensionType = f.Type } default: for _, s := range strings.Split(f.Tag.Get("protobuf"), ",") { @@ -212,4 +224,53 @@ func (mi *MessageInfo) New() protoreflect.Message { func (mi *MessageInfo) Zero() protoreflect.Message { return mi.MessageOf(reflect.Zero(mi.GoReflectType).Interface()) } -func (mi *MessageInfo) Descriptor() protoreflect.MessageDescriptor { return mi.Desc } +func (mi *MessageInfo) Descriptor() protoreflect.MessageDescriptor { + return mi.Desc +} +func (mi *MessageInfo) Enum(i int) protoreflect.EnumType { + mi.init() + fd := mi.Desc.Fields().Get(i) + return Export{}.EnumTypeOf(mi.fieldTypes[fd.Number()]) +} +func (mi *MessageInfo) Message(i int) protoreflect.MessageType { + mi.init() + fd := mi.Desc.Fields().Get(i) + switch { + case fd.IsWeak(): + mt, _ := preg.GlobalTypes.FindMessageByName(fd.Message().FullName()) + return mt + case fd.IsMap(): + return mapEntryType{fd.Message(), mi.fieldTypes[fd.Number()]} + default: + return Export{}.MessageTypeOf(mi.fieldTypes[fd.Number()]) + } +} + +type mapEntryType struct { + desc protoreflect.MessageDescriptor + valType interface{} // zero value of enum or message type +} + +func (mt 
mapEntryType) New() protoreflect.Message { + return nil +} +func (mt mapEntryType) Zero() protoreflect.Message { + return nil +} +func (mt mapEntryType) Descriptor() protoreflect.MessageDescriptor { + return mt.desc +} +func (mt mapEntryType) Enum(i int) protoreflect.EnumType { + fd := mt.desc.Fields().Get(i) + if fd.Enum() == nil { + return nil + } + return Export{}.EnumTypeOf(mt.valType) +} +func (mt mapEntryType) Message(i int) protoreflect.MessageType { + fd := mt.desc.Fields().Get(i) + if fd.Message() == nil { + return nil + } + return Export{}.MessageTypeOf(mt.valType) +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/message_reflect.go b/vendor/google.golang.org/protobuf/internal/impl/message_reflect.go index 0f4b8db760a..0c6f106ae04 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/message_reflect.go +++ b/vendor/google.golang.org/protobuf/internal/impl/message_reflect.go @@ -8,6 +8,7 @@ import ( "fmt" "reflect" + "google.golang.org/protobuf/internal/detrand" "google.golang.org/protobuf/internal/pragma" pref "google.golang.org/protobuf/reflect/protoreflect" ) @@ -16,6 +17,11 @@ type reflectMessageInfo struct { fields map[pref.FieldNumber]*fieldInfo oneofs map[pref.Name]*oneofInfo + // fieldTypes contains the zero value of an enum or message field. + // For lists, it contains the element type. + // For maps, it contains the entry value type. + fieldTypes map[pref.FieldNumber]interface{} + // denseFields is a subset of fields where: // 0 < fieldDesc.Number() < len(denseFields) // It provides faster access to the fieldInfo, but may be incomplete. @@ -36,6 +42,7 @@ func (mi *MessageInfo) makeReflectFuncs(t reflect.Type, si structInfo) { mi.makeKnownFieldsFunc(si) mi.makeUnknownFieldsFunc(t, si) mi.makeExtensionFieldsFunc(t, si) + mi.makeFieldTypes(si) } // makeKnownFieldsFunc generates functions for operations that can be performed @@ -61,7 +68,7 @@ func (mi *MessageInfo) makeKnownFieldsFunc(si structInfo) { fi = fieldInfoForList(fd, fs, mi.Exporter) case fd.IsWeak(): fi = fieldInfoForWeakMessage(fd, si.weakOffset) - case fd.Kind() == pref.MessageKind || fd.Kind() == pref.GroupKind: + case fd.Message() != nil: fi = fieldInfoForMessage(fd, fs, mi.Exporter) default: fi = fieldInfoForScalar(fd, fs, mi.Exporter) @@ -92,27 +99,53 @@ func (mi *MessageInfo) makeKnownFieldsFunc(si structInfo) { i++ } } + + // Introduce instability to iteration order, but keep it deterministic. + if len(mi.rangeInfos) > 1 && detrand.Bool() { + i := detrand.Intn(len(mi.rangeInfos) - 1) + mi.rangeInfos[i], mi.rangeInfos[i+1] = mi.rangeInfos[i+1], mi.rangeInfos[i] + } } func (mi *MessageInfo) makeUnknownFieldsFunc(t reflect.Type, si structInfo) { - mi.getUnknown = func(pointer) pref.RawFields { return nil } - mi.setUnknown = func(pointer, pref.RawFields) { return } - if si.unknownOffset.IsValid() { + switch { + case si.unknownOffset.IsValid() && si.unknownType == unknownFieldsAType: + // Handle as []byte. 
mi.getUnknown = func(p pointer) pref.RawFields { if p.IsNil() { return nil } - rv := p.Apply(si.unknownOffset).AsValueOf(unknownFieldsType) - return pref.RawFields(*rv.Interface().(*[]byte)) + return *p.Apply(mi.unknownOffset).Bytes() } mi.setUnknown = func(p pointer, b pref.RawFields) { if p.IsNil() { panic("invalid SetUnknown on nil Message") } - rv := p.Apply(si.unknownOffset).AsValueOf(unknownFieldsType) - *rv.Interface().(*[]byte) = []byte(b) + *p.Apply(mi.unknownOffset).Bytes() = b } - } else { + case si.unknownOffset.IsValid() && si.unknownType == unknownFieldsBType: + // Handle as *[]byte. + mi.getUnknown = func(p pointer) pref.RawFields { + if p.IsNil() { + return nil + } + bp := p.Apply(mi.unknownOffset).BytesPtr() + if *bp == nil { + return nil + } + return **bp + } + mi.setUnknown = func(p pointer, b pref.RawFields) { + if p.IsNil() { + panic("invalid SetUnknown on nil Message") + } + bp := p.Apply(mi.unknownOffset).BytesPtr() + if *bp == nil { + *bp = new([]byte) + } + **bp = b + } + default: mi.getUnknown = func(pointer) pref.RawFields { return nil } @@ -139,6 +172,45 @@ func (mi *MessageInfo) makeExtensionFieldsFunc(t reflect.Type, si structInfo) { } } } +func (mi *MessageInfo) makeFieldTypes(si structInfo) { + md := mi.Desc + fds := md.Fields() + for i := 0; i < fds.Len(); i++ { + var ft reflect.Type + fd := fds.Get(i) + fs := si.fieldsByNumber[fd.Number()] + switch { + case fd.ContainingOneof() != nil && !fd.ContainingOneof().IsSynthetic(): + if fd.Enum() != nil || fd.Message() != nil { + ft = si.oneofWrappersByNumber[fd.Number()].Field(0).Type + } + case fd.IsMap(): + if fd.MapValue().Enum() != nil || fd.MapValue().Message() != nil { + ft = fs.Type.Elem() + } + case fd.IsList(): + if fd.Enum() != nil || fd.Message() != nil { + ft = fs.Type.Elem() + } + case fd.Enum() != nil: + ft = fs.Type + if fd.HasPresence() { + ft = ft.Elem() + } + case fd.Message() != nil: + ft = fs.Type + if fd.IsWeak() { + ft = nil + } + } + if ft != nil { + if mi.fieldTypes == nil { + mi.fieldTypes = make(map[pref.FieldNumber]interface{}) + } + mi.fieldTypes[fd.Number()] = reflect.Zero(ft).Interface() + } + } +} type extensionMap map[int32]ExtensionField @@ -306,7 +378,6 @@ var ( // pointer to a named Go struct. If the provided type has a ProtoReflect method, // it must be implemented by calling this method. func (mi *MessageInfo) MessageOf(m interface{}) pref.Message { - // TODO: Switch the input to be an opaque Pointer. 
if reflect.TypeOf(m) != mi.GoReflectType { panic(fmt.Sprintf("type mismatch: got %T, want %v", m, mi.GoReflectType)) } diff --git a/vendor/google.golang.org/protobuf/internal/impl/pointer_reflect.go b/vendor/google.golang.org/protobuf/internal/impl/pointer_reflect.go index 67b4ede6705..9e3ed821efb 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/pointer_reflect.go +++ b/vendor/google.golang.org/protobuf/internal/impl/pointer_reflect.go @@ -121,6 +121,7 @@ func (p pointer) String() *string { return p.v.Interface().(*string) } func (p pointer) StringPtr() **string { return p.v.Interface().(**string) } func (p pointer) StringSlice() *[]string { return p.v.Interface().(*[]string) } func (p pointer) Bytes() *[]byte { return p.v.Interface().(*[]byte) } +func (p pointer) BytesPtr() **[]byte { return p.v.Interface().(**[]byte) } func (p pointer) BytesSlice() *[][]byte { return p.v.Interface().(*[][]byte) } func (p pointer) WeakFields() *weakFields { return (*weakFields)(p.v.Interface().(*WeakFields)) } func (p pointer) Extensions() *map[int32]ExtensionField { diff --git a/vendor/google.golang.org/protobuf/internal/impl/pointer_unsafe.go b/vendor/google.golang.org/protobuf/internal/impl/pointer_unsafe.go index 088aa85d483..9ecf23a85bb 100644 --- a/vendor/google.golang.org/protobuf/internal/impl/pointer_unsafe.go +++ b/vendor/google.golang.org/protobuf/internal/impl/pointer_unsafe.go @@ -109,6 +109,7 @@ func (p pointer) String() *string { return (*string)(p.p) func (p pointer) StringPtr() **string { return (**string)(p.p) } func (p pointer) StringSlice() *[]string { return (*[]string)(p.p) } func (p pointer) Bytes() *[]byte { return (*[]byte)(p.p) } +func (p pointer) BytesPtr() **[]byte { return (**[]byte)(p.p) } func (p pointer) BytesSlice() *[][]byte { return (*[][]byte)(p.p) } func (p pointer) WeakFields() *weakFields { return (*weakFields)(p.p) } func (p pointer) Extensions() *map[int32]ExtensionField { return (*map[int32]ExtensionField)(p.p) } diff --git a/vendor/google.golang.org/protobuf/internal/mapsort/mapsort.go b/vendor/google.golang.org/protobuf/internal/mapsort/mapsort.go deleted file mode 100644 index a3de1cf3241..00000000000 --- a/vendor/google.golang.org/protobuf/internal/mapsort/mapsort.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package mapsort provides sorted access to maps. -package mapsort - -import ( - "sort" - - "google.golang.org/protobuf/reflect/protoreflect" -) - -// Range iterates over every map entry in sorted key order, -// calling f for each key and value encountered. 
-func Range(mapv protoreflect.Map, keyKind protoreflect.Kind, f func(protoreflect.MapKey, protoreflect.Value) bool) { - var keys []protoreflect.MapKey - mapv.Range(func(key protoreflect.MapKey, _ protoreflect.Value) bool { - keys = append(keys, key) - return true - }) - sort.Slice(keys, func(i, j int) bool { - switch keyKind { - case protoreflect.BoolKind: - return !keys[i].Bool() && keys[j].Bool() - case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind, - protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind: - return keys[i].Int() < keys[j].Int() - case protoreflect.Uint32Kind, protoreflect.Fixed32Kind, - protoreflect.Uint64Kind, protoreflect.Fixed64Kind: - return keys[i].Uint() < keys[j].Uint() - case protoreflect.StringKind: - return keys[i].String() < keys[j].String() - default: - panic("invalid kind: " + keyKind.String()) - } - }) - for _, key := range keys { - if !f(key, mapv.Get(key)) { - break - } - } -} diff --git a/vendor/google.golang.org/protobuf/internal/order/order.go b/vendor/google.golang.org/protobuf/internal/order/order.go new file mode 100644 index 00000000000..2a24953f6a4 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/order/order.go @@ -0,0 +1,89 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package order + +import ( + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +// FieldOrder specifies the ordering to visit message fields. +// It is a function that reports whether x is ordered before y. +type FieldOrder func(x, y pref.FieldDescriptor) bool + +var ( + // AnyFieldOrder specifies no specific field ordering. + AnyFieldOrder FieldOrder = nil + + // LegacyFieldOrder sorts fields in the same ordering as emitted by + // wire serialization in the github.com/golang/protobuf implementation. + LegacyFieldOrder FieldOrder = func(x, y pref.FieldDescriptor) bool { + ox, oy := x.ContainingOneof(), y.ContainingOneof() + inOneof := func(od pref.OneofDescriptor) bool { + return od != nil && !od.IsSynthetic() + } + + // Extension fields sort before non-extension fields. + if x.IsExtension() != y.IsExtension() { + return x.IsExtension() && !y.IsExtension() + } + // Fields not within a oneof sort before those within a oneof. + if inOneof(ox) != inOneof(oy) { + return !inOneof(ox) && inOneof(oy) + } + // Fields in disjoint oneof sets are sorted by declaration index. + if ox != nil && oy != nil && ox != oy { + return ox.Index() < oy.Index() + } + // Fields sorted by field number. + return x.Number() < y.Number() + } + + // NumberFieldOrder sorts fields by their field number. + NumberFieldOrder FieldOrder = func(x, y pref.FieldDescriptor) bool { + return x.Number() < y.Number() + } + + // IndexNameFieldOrder sorts non-extension fields before extension fields. + // Non-extensions are sorted according to their declaration index. + // Extensions are sorted according to their full name. + IndexNameFieldOrder FieldOrder = func(x, y pref.FieldDescriptor) bool { + // Non-extension fields sort before extension fields. + if x.IsExtension() != y.IsExtension() { + return !x.IsExtension() && y.IsExtension() + } + // Extensions sorted by fullname. + if x.IsExtension() && y.IsExtension() { + return x.FullName() < y.FullName() + } + // Non-extensions sorted by declaration index. + return x.Index() < y.Index() + } +) + +// KeyOrder specifies the ordering to visit map entries. 
+// It is a function that reports whether x is ordered before y. +type KeyOrder func(x, y pref.MapKey) bool + +var ( + // AnyKeyOrder specifies no specific key ordering. + AnyKeyOrder KeyOrder = nil + + // GenericKeyOrder sorts false before true, numeric keys in ascending order, + // and strings in lexicographical ordering according to UTF-8 codepoints. + GenericKeyOrder KeyOrder = func(x, y pref.MapKey) bool { + switch x.Interface().(type) { + case bool: + return !x.Bool() && y.Bool() + case int32, int64: + return x.Int() < y.Int() + case uint32, uint64: + return x.Uint() < y.Uint() + case string: + return x.String() < y.String() + default: + panic("invalid map key type") + } + } +) diff --git a/vendor/google.golang.org/protobuf/internal/order/range.go b/vendor/google.golang.org/protobuf/internal/order/range.go new file mode 100644 index 00000000000..c8090e0c547 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/order/range.go @@ -0,0 +1,115 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package order provides ordered access to messages and maps. +package order + +import ( + "sort" + "sync" + + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +type messageField struct { + fd pref.FieldDescriptor + v pref.Value +} + +var messageFieldPool = sync.Pool{ + New: func() interface{} { return new([]messageField) }, +} + +type ( + // FieldRanger is an interface for visiting all fields in a message. + // The protoreflect.Message type implements this interface. + FieldRanger interface{ Range(VisitField) } + // VisitField is called every time a message field is visited. + VisitField = func(pref.FieldDescriptor, pref.Value) bool +) + +// RangeFields iterates over the fields of fs according to the specified order. +func RangeFields(fs FieldRanger, less FieldOrder, fn VisitField) { + if less == nil { + fs.Range(fn) + return + } + + // Obtain a pre-allocated scratch buffer. + p := messageFieldPool.Get().(*[]messageField) + fields := (*p)[:0] + defer func() { + if cap(fields) < 1024 { + *p = fields + messageFieldPool.Put(p) + } + }() + + // Collect all fields in the message and sort them. + fs.Range(func(fd pref.FieldDescriptor, v pref.Value) bool { + fields = append(fields, messageField{fd, v}) + return true + }) + sort.Slice(fields, func(i, j int) bool { + return less(fields[i].fd, fields[j].fd) + }) + + // Visit the fields in the specified ordering. + for _, f := range fields { + if !fn(f.fd, f.v) { + return + } + } +} + +type mapEntry struct { + k pref.MapKey + v pref.Value +} + +var mapEntryPool = sync.Pool{ + New: func() interface{} { return new([]mapEntry) }, +} + +type ( + // EntryRanger is an interface for visiting all entries in a map. + // The protoreflect.Map type implements this interface. + EntryRanger interface{ Range(VisitEntry) } + // VisitEntry is called every time a map entry is visited. + VisitEntry = func(pref.MapKey, pref.Value) bool +) + +// RangeEntries iterates over the entries of es according to the specified order. +func RangeEntries(es EntryRanger, less KeyOrder, fn VisitEntry) { + if less == nil { + es.Range(fn) + return + } + + // Obtain a pre-allocated scratch buffer. + p := mapEntryPool.Get().(*[]mapEntry) + entries := (*p)[:0] + defer func() { + if cap(entries) < 1024 { + *p = entries + mapEntryPool.Put(p) + } + }() + + // Collect all entries in the map and sort them.
+ es.Range(func(k pref.MapKey, v pref.Value) bool { + entries = append(entries, mapEntry{k, v}) + return true + }) + sort.Slice(entries, func(i, j int) bool { + return less(entries[i].k, entries[j].k) + }) + + // Visit the entries in the specified ordering. + for _, e := range entries { + if !fn(e.k, e.v) { + return + } + } +} diff --git a/vendor/google.golang.org/protobuf/internal/version/version.go b/vendor/google.golang.org/protobuf/internal/version/version.go index 72cf770b427..72b24ee5ec6 100644 --- a/vendor/google.golang.org/protobuf/internal/version/version.go +++ b/vendor/google.golang.org/protobuf/internal/version/version.go @@ -54,7 +54,7 @@ const ( Major = 1 Minor = 25 Patch = 0 - PreRelease = "" + PreRelease = "devel" ) // String formats the version string for this module in semver format. diff --git a/vendor/google.golang.org/protobuf/proto/decode.go b/vendor/google.golang.org/protobuf/proto/decode.go index 42fc5195e87..a0efaa03855 100644 --- a/vendor/google.golang.org/protobuf/proto/decode.go +++ b/vendor/google.golang.org/protobuf/proto/decode.go @@ -116,10 +116,10 @@ func (o UnmarshalOptions) unmarshalMessageSlow(b []byte, m protoreflect.Message) // Parse the tag (field number and wire type). num, wtyp, tagLen := protowire.ConsumeTag(b) if tagLen < 0 { - return protowire.ParseError(tagLen) + return errDecode } if num > protowire.MaxValidNumber { - return errors.New("invalid field number") + return errDecode } // Find the field descriptor for this field number. @@ -159,7 +159,7 @@ func (o UnmarshalOptions) unmarshalMessageSlow(b []byte, m protoreflect.Message) } valLen = protowire.ConsumeFieldValue(num, wtyp, b[tagLen:]) if valLen < 0 { - return protowire.ParseError(valLen) + return errDecode } if !o.DiscardUnknown { m.SetUnknown(append(m.GetUnknown(), b[:tagLen+valLen]...)) @@ -194,7 +194,7 @@ func (o UnmarshalOptions) unmarshalMap(b []byte, wtyp protowire.Type, mapv proto } b, n = protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } var ( keyField = fd.MapKey() @@ -213,10 +213,10 @@ func (o UnmarshalOptions) unmarshalMap(b []byte, wtyp protowire.Type, mapv proto for len(b) > 0 { num, wtyp, n := protowire.ConsumeTag(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } if num > protowire.MaxValidNumber { - return 0, errors.New("invalid field number") + return 0, errDecode } b = b[n:] err = errUnknown @@ -246,7 +246,7 @@ func (o UnmarshalOptions) unmarshalMap(b []byte, wtyp protowire.Type, mapv proto if err == errUnknown { n = protowire.ConsumeFieldValue(num, wtyp, b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } } else if err != nil { return 0, err @@ -272,3 +272,5 @@ func (o UnmarshalOptions) unmarshalMap(b []byte, wtyp protowire.Type, mapv proto // to the unknown field set of a message. It is never returned from an exported // function. 
var errUnknown = errors.New("BUG: internal error (unknown)") + +var errDecode = errors.New("cannot parse invalid wire-format data") diff --git a/vendor/google.golang.org/protobuf/proto/decode_gen.go b/vendor/google.golang.org/protobuf/proto/decode_gen.go index d6dc904dccf..301eeb20f82 100644 --- a/vendor/google.golang.org/protobuf/proto/decode_gen.go +++ b/vendor/google.golang.org/protobuf/proto/decode_gen.go @@ -27,7 +27,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfBool(protowire.DecodeBool(v)), n, nil case protoreflect.EnumKind: @@ -36,7 +36,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfEnum(protoreflect.EnumNumber(v)), n, nil case protoreflect.Int32Kind: @@ -45,7 +45,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfInt32(int32(v)), n, nil case protoreflect.Sint32Kind: @@ -54,7 +54,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32))), n, nil case protoreflect.Uint32Kind: @@ -63,7 +63,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfUint32(uint32(v)), n, nil case protoreflect.Int64Kind: @@ -72,7 +72,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfInt64(int64(v)), n, nil case protoreflect.Sint64Kind: @@ -81,7 +81,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfInt64(protowire.DecodeZigZag(v)), n, nil case protoreflect.Uint64Kind: @@ -90,7 +90,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfUint64(v), n, nil case protoreflect.Sfixed32Kind: @@ -99,7 +99,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfInt32(int32(v)), n, nil case protoreflect.Fixed32Kind: @@ -108,7 +108,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfUint32(uint32(v)), n, nil case protoreflect.FloatKind: @@ -117,7 +117,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, 
wtyp protowire.Type, fd prot } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v))), n, nil case protoreflect.Sfixed64Kind: @@ -126,7 +126,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfInt64(int64(v)), n, nil case protoreflect.Fixed64Kind: @@ -135,7 +135,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfUint64(v), n, nil case protoreflect.DoubleKind: @@ -144,7 +144,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfFloat64(math.Float64frombits(v)), n, nil case protoreflect.StringKind: @@ -153,7 +153,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeBytes(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } if strs.EnforceUTF8(fd) && !utf8.Valid(v) { return protoreflect.Value{}, 0, errors.InvalidUTF8(string(fd.FullName())) @@ -165,7 +165,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeBytes(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfBytes(append(emptyBuf[:], v...)), n, nil case protoreflect.MessageKind: @@ -174,7 +174,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeBytes(b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfBytes(v), n, nil case protoreflect.GroupKind: @@ -183,7 +183,7 @@ func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd prot } v, n := protowire.ConsumeGroup(fd.Number(), b) if n < 0 { - return val, 0, protowire.ParseError(n) + return val, 0, errDecode } return protoreflect.ValueOfBytes(v), n, nil default: @@ -197,12 +197,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeVarint(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfBool(protowire.DecodeBool(v))) @@ -214,7 +214,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfBool(protowire.DecodeBool(v))) return n, nil @@ -222,12 +222,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeVarint(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] 
list.Append(protoreflect.ValueOfEnum(protoreflect.EnumNumber(v))) @@ -239,7 +239,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfEnum(protoreflect.EnumNumber(v))) return n, nil @@ -247,12 +247,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeVarint(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfInt32(int32(v))) @@ -264,7 +264,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfInt32(int32(v))) return n, nil @@ -272,12 +272,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeVarint(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32)))) @@ -289,7 +289,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32)))) return n, nil @@ -297,12 +297,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeVarint(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfUint32(uint32(v))) @@ -314,7 +314,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfUint32(uint32(v))) return n, nil @@ -322,12 +322,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeVarint(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfInt64(int64(v))) @@ -339,7 +339,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfInt64(int64(v))) return n, nil @@ -347,12 +347,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := 
protowire.ConsumeVarint(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfInt64(protowire.DecodeZigZag(v))) @@ -364,7 +364,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfInt64(protowire.DecodeZigZag(v))) return n, nil @@ -372,12 +372,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeVarint(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfUint64(v)) @@ -389,7 +389,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeVarint(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfUint64(v)) return n, nil @@ -397,12 +397,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeFixed32(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfInt32(int32(v))) @@ -414,7 +414,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfInt32(int32(v))) return n, nil @@ -422,12 +422,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeFixed32(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfUint32(uint32(v))) @@ -439,7 +439,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfUint32(uint32(v))) return n, nil @@ -447,12 +447,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeFixed32(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v)))) @@ -464,7 +464,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeFixed32(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v)))) return n, nil @@ -472,12 +472,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, 
protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeFixed64(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfInt64(int64(v))) @@ -489,7 +489,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfInt64(int64(v))) return n, nil @@ -497,12 +497,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeFixed64(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfUint64(v)) @@ -514,7 +514,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfUint64(v)) return n, nil @@ -522,12 +522,12 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot if wtyp == protowire.BytesType { buf, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } for len(buf) > 0 { v, n := protowire.ConsumeFixed64(buf) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } buf = buf[n:] list.Append(protoreflect.ValueOfFloat64(math.Float64frombits(v))) @@ -539,7 +539,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeFixed64(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfFloat64(math.Float64frombits(v))) return n, nil @@ -549,7 +549,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } if strs.EnforceUTF8(fd) && !utf8.Valid(v) { return 0, errors.InvalidUTF8(string(fd.FullName())) @@ -562,7 +562,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } list.Append(protoreflect.ValueOfBytes(append(emptyBuf[:], v...))) return n, nil @@ -572,7 +572,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeBytes(b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } m := list.NewElement() if err := o.unmarshalMessage(v, m.Message()); err != nil { @@ -586,7 +586,7 @@ func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list prot } v, n := protowire.ConsumeGroup(fd.Number(), b) if n < 0 { - return 0, protowire.ParseError(n) + return 0, errDecode } m := list.NewElement() if err := o.unmarshalMessage(v, m.Message()); err != nil { diff --git a/vendor/google.golang.org/protobuf/proto/encode.go b/vendor/google.golang.org/protobuf/proto/encode.go index 7b47a1180e4..d18239c2372 100644 --- a/vendor/google.golang.org/protobuf/proto/encode.go +++ b/vendor/google.golang.org/protobuf/proto/encode.go @@ -5,12 +5,9 @@ package proto import ( - "sort" - "google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/internal/encoding/messageset" 
- "google.golang.org/protobuf/internal/fieldsort" - "google.golang.org/protobuf/internal/mapsort" + "google.golang.org/protobuf/internal/order" "google.golang.org/protobuf/internal/pragma" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/runtime/protoiface" @@ -211,14 +208,15 @@ func (o MarshalOptions) marshalMessageSlow(b []byte, m protoreflect.Message) ([] if messageset.IsMessageSet(m.Descriptor()) { return o.marshalMessageSet(b, m) } - // There are many choices for what order we visit fields in. The default one here - // is chosen for reasonable efficiency and simplicity given the protoreflect API. - // It is not deterministic, since Message.Range does not return fields in any - // defined order. - // - // When using deterministic serialization, we sort the known fields. + fieldOrder := order.AnyFieldOrder + if o.Deterministic { + // TODO: This should use a more natural ordering like NumberFieldOrder, + // but doing so breaks golden tests that make invalid assumption about + // output stability of this implementation. + fieldOrder = order.LegacyFieldOrder + } var err error - o.rangeFields(m, func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + order.RangeFields(m, fieldOrder, func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { b, err = o.marshalField(b, fd, v) return err == nil }) @@ -229,27 +227,6 @@ func (o MarshalOptions) marshalMessageSlow(b []byte, m protoreflect.Message) ([] return b, nil } -// rangeFields visits fields in a defined order when deterministic serialization is enabled. -func (o MarshalOptions) rangeFields(m protoreflect.Message, f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) { - if !o.Deterministic { - m.Range(f) - return - } - var fds []protoreflect.FieldDescriptor - m.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool { - fds = append(fds, fd) - return true - }) - sort.Slice(fds, func(a, b int) bool { - return fieldsort.Less(fds[a], fds[b]) - }) - for _, fd := range fds { - if !f(fd, m.Get(fd)) { - break - } - } -} - func (o MarshalOptions) marshalField(b []byte, fd protoreflect.FieldDescriptor, value protoreflect.Value) ([]byte, error) { switch { case fd.IsList(): @@ -292,8 +269,12 @@ func (o MarshalOptions) marshalList(b []byte, fd protoreflect.FieldDescriptor, l func (o MarshalOptions) marshalMap(b []byte, fd protoreflect.FieldDescriptor, mapv protoreflect.Map) ([]byte, error) { keyf := fd.MapKey() valf := fd.MapValue() + keyOrder := order.AnyKeyOrder + if o.Deterministic { + keyOrder = order.GenericKeyOrder + } var err error - o.rangeMap(mapv, keyf.Kind(), func(key protoreflect.MapKey, value protoreflect.Value) bool { + order.RangeEntries(mapv, keyOrder, func(key protoreflect.MapKey, value protoreflect.Value) bool { b = protowire.AppendTag(b, fd.Number(), protowire.BytesType) var pos int b, pos = appendSpeculativeLength(b) @@ -312,14 +293,6 @@ func (o MarshalOptions) marshalMap(b []byte, fd protoreflect.FieldDescriptor, ma return b, err } -func (o MarshalOptions) rangeMap(mapv protoreflect.Map, kind protoreflect.Kind, f func(protoreflect.MapKey, protoreflect.Value) bool) { - if !o.Deterministic { - mapv.Range(f) - return - } - mapsort.Range(mapv, kind, f) -} - // When encoding length-prefixed fields, we speculatively set aside some number of bytes // for the length, encode the data, and then encode the length (shifting the data if necessary // to make room). 
diff --git a/vendor/google.golang.org/protobuf/proto/messageset.go b/vendor/google.golang.org/protobuf/proto/messageset.go index 1d692c3a8b3..312d5d45c60 100644 --- a/vendor/google.golang.org/protobuf/proto/messageset.go +++ b/vendor/google.golang.org/protobuf/proto/messageset.go @@ -9,6 +9,7 @@ import ( "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/errors" "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/order" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/reflect/protoregistry" ) @@ -28,8 +29,12 @@ func (o MarshalOptions) marshalMessageSet(b []byte, m protoreflect.Message) ([]b if !flags.ProtoLegacy { return b, errors.New("no support for message_set_wire_format") } + fieldOrder := order.AnyFieldOrder + if o.Deterministic { + fieldOrder = order.NumberFieldOrder + } var err error - o.rangeFields(m, func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + order.RangeFields(m, fieldOrder, func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { b, err = o.marshalMessageSetField(b, fd, v) return err == nil }) diff --git a/vendor/google.golang.org/protobuf/proto/proto.go b/vendor/google.golang.org/protobuf/proto/proto.go index ca14b09c341..1f0d183b102 100644 --- a/vendor/google.golang.org/protobuf/proto/proto.go +++ b/vendor/google.golang.org/protobuf/proto/proto.go @@ -32,3 +32,12 @@ var Error error func init() { Error = errors.Error } + +// MessageName returns the full name of m. +// If m is nil, it returns an empty string. +func MessageName(m Message) protoreflect.FullName { + if m == nil { + return "" + } + return m.ProtoReflect().Descriptor().FullName() +} diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go b/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go index 37f254d4c05..e4dfb120506 100644 --- a/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go @@ -144,6 +144,7 @@ func (o FileOptions) New(fd *descriptorpb.FileDescriptorProto, r Resolver) (prot } // Handle source locations. + f.L2.Locations.File = f for _, loc := range fd.GetSourceCodeInfo().GetLocation() { var l protoreflect.SourceLocation // TODO: Validate that the path points to an actual declaration? 
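Alongside the ordering changes, proto/proto.go above gains a small public helper, proto.MessageName, which returns the full protobuf name of a message and an empty string for nil. A minimal usage sketch, not part of the patch, assuming only the vendored durationpb well-known type:

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	// The full name is read from the message's descriptor.
	fmt.Println(proto.MessageName(&durationpb.Duration{})) // google.protobuf.Duration

	// A nil message yields an empty name instead of panicking.
	fmt.Println(proto.MessageName(nil) == "") // true
}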
diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/desc_init.go b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_init.go index 673a230e7a0..37efda1afe9 100644 --- a/vendor/google.golang.org/protobuf/reflect/protodesc/desc_init.go +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_init.go @@ -135,7 +135,7 @@ func (r descsByName) initFieldsFromDescriptorProto(fds []*descriptorpb.FieldDesc f.L1.Kind = protoreflect.Kind(fd.GetType()) } if fd.JsonName != nil { - f.L1.JSONName.Init(fd.GetJsonName()) + f.L1.StringName.InitJSON(fd.GetJsonName()) } } return fs, nil @@ -175,7 +175,7 @@ func (r descsByName) initExtensionDeclarations(xds []*descriptorpb.FieldDescript x.L1.Kind = protoreflect.Kind(xd.GetType()) } if xd.JsonName != nil { - x.L2.JSONName.Init(xd.GetJsonName()) + x.L2.StringName.InitJSON(xd.GetJsonName()) } } return xs, nil diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go index 2d5fa9936ba..9af1d56487a 100644 --- a/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go @@ -239,6 +239,9 @@ func validateExtensionDeclarations(xs []filedesc.Extension, xds []*descriptorpb. return errors.New("extension field %q has an invalid cardinality: %d", x.FullName(), x.Cardinality()) } if xd.JsonName != nil { + // A bug in older versions of protoc would always populate the + // "json_name" option for extensions when it is meaningless. + // When it did so, it would always use the camel-cased field name. if xd.GetJsonName() != strs.JSONCamelCase(string(x.Name())) { return errors.New("extension field %q may not have an explicitly set JSON name: %q", x.FullName(), xd.GetJsonName()) } diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/proto.go b/vendor/google.golang.org/protobuf/reflect/protodesc/proto.go index 00d35e02eee..a1b7c505877 100644 --- a/vendor/google.golang.org/protobuf/reflect/protodesc/proto.go +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/proto.go @@ -9,6 +9,7 @@ import ( "strings" "google.golang.org/protobuf/internal/encoding/defval" + "google.golang.org/protobuf/internal/strs" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/reflect/protoreflect" @@ -138,7 +139,14 @@ func ToFieldDescriptorProto(field protoreflect.FieldDescriptor) *descriptorpb.Fi p.TypeName = fullNameOf(field.Message()) } if field.HasJSONName() { - p.JsonName = proto.String(field.JSONName()) + // A bug in older versions of protoc would always populate the + // "json_name" option for extensions when it is meaningless. + // When it did so, it would always use the camel-cased field name. + if field.IsExtension() { + p.JsonName = proto.String(strs.JSONCamelCase(string(field.Name()))) + } else { + p.JsonName = proto.String(field.JSONName()) + } } if field.Syntax() == protoreflect.Proto3 && field.HasOptionalKeyword() { p.Proto3Optional = proto.Bool(true) diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/source.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/source.go index 32ea3d98cd2..121ba3a07bb 100644 --- a/vendor/google.golang.org/protobuf/reflect/protoreflect/source.go +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/source.go @@ -4,6 +4,10 @@ package protoreflect +import ( + "strconv" +) + // SourceLocations is a list of source locations. 
type SourceLocations interface { // Len reports the number of source locations in the proto file. @@ -11,9 +15,20 @@ type SourceLocations interface { // Get returns the ith SourceLocation. It panics if out of bounds. Get(int) SourceLocation - doNotImplement + // ByPath returns the SourceLocation for the given path, + // returning the first location if multiple exist for the same path. + // If multiple locations exist for the same path, + // then SourceLocation.Next index can be used to identify the + // index of the next SourceLocation. + // If no location exists for this path, it returns the zero value. + ByPath(path SourcePath) SourceLocation - // TODO: Add ByPath and ByDescriptor helper methods. + // ByDescriptor returns the SourceLocation for the given descriptor, + // returning the first location if multiple exist for the same path. + // If no location exists for this descriptor, it returns the zero value. + ByDescriptor(desc Descriptor) SourceLocation + + doNotImplement } // SourceLocation describes a source location and @@ -39,6 +54,10 @@ type SourceLocation struct { LeadingComments string // TrailingComments is the trailing attached comment for the declaration. TrailingComments string + + // Next is an index into SourceLocations for the next source location that + // has the same Path. It is zero if there is no next location. + Next int } // SourcePath identifies part of a file descriptor for a source location. @@ -48,5 +67,62 @@ type SourceLocation struct { // See google.protobuf.SourceCodeInfo.Location.path. type SourcePath []int32 -// TODO: Add SourcePath.String method to pretty-print the path. For example: -// ".message_type[6].nested_type[15].field[3]" +// Equal reports whether p1 equals p2. +func (p1 SourcePath) Equal(p2 SourcePath) bool { + if len(p1) != len(p2) { + return false + } + for i := range p1 { + if p1[i] != p2[i] { + return false + } + } + return true +} + +// String formats the path in a humanly readable manner. +// The output is guaranteed to be deterministic, +// making it suitable for use as a key into a Go map. +// It is not guaranteed to be stable as the exact output could change +// in a future version of this module. +// +// Example output: +// .message_type[6].nested_type[15].field[3] +func (p SourcePath) String() string { + b := p.appendFileDescriptorProto(nil) + for _, i := range p { + b = append(b, '.') + b = strconv.AppendInt(b, int64(i), 10) + } + return string(b) +} + +type appendFunc func(*SourcePath, []byte) []byte + +func (p *SourcePath) appendSingularField(b []byte, name string, f appendFunc) []byte { + if len(*p) == 0 { + return b + } + b = append(b, '.') + b = append(b, name...) + *p = (*p)[1:] + if f != nil { + b = f(p, b) + } + return b +} + +func (p *SourcePath) appendRepeatedField(b []byte, name string, f appendFunc) []byte { + b = p.appendSingularField(b, name, nil) + if len(*p) == 0 || (*p)[0] < 0 { + return b + } + b = append(b, '[') + b = strconv.AppendUint(b, uint64((*p)[0]), 10) + b = append(b, ']') + *p = (*p)[1:] + if f != nil { + b = f(p, b) + } + return b +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go new file mode 100644 index 00000000000..b03c1223c4a --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go @@ -0,0 +1,461 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Code generated by generate-protos. DO NOT EDIT. + +package protoreflect + +func (p *SourcePath) appendFileDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendSingularField(b, "package", nil) + case 3: + b = p.appendRepeatedField(b, "dependency", nil) + case 10: + b = p.appendRepeatedField(b, "public_dependency", nil) + case 11: + b = p.appendRepeatedField(b, "weak_dependency", nil) + case 4: + b = p.appendRepeatedField(b, "message_type", (*SourcePath).appendDescriptorProto) + case 5: + b = p.appendRepeatedField(b, "enum_type", (*SourcePath).appendEnumDescriptorProto) + case 6: + b = p.appendRepeatedField(b, "service", (*SourcePath).appendServiceDescriptorProto) + case 7: + b = p.appendRepeatedField(b, "extension", (*SourcePath).appendFieldDescriptorProto) + case 8: + b = p.appendSingularField(b, "options", (*SourcePath).appendFileOptions) + case 9: + b = p.appendSingularField(b, "source_code_info", (*SourcePath).appendSourceCodeInfo) + case 12: + b = p.appendSingularField(b, "syntax", nil) + } + return b +} + +func (p *SourcePath) appendDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendRepeatedField(b, "field", (*SourcePath).appendFieldDescriptorProto) + case 6: + b = p.appendRepeatedField(b, "extension", (*SourcePath).appendFieldDescriptorProto) + case 3: + b = p.appendRepeatedField(b, "nested_type", (*SourcePath).appendDescriptorProto) + case 4: + b = p.appendRepeatedField(b, "enum_type", (*SourcePath).appendEnumDescriptorProto) + case 5: + b = p.appendRepeatedField(b, "extension_range", (*SourcePath).appendDescriptorProto_ExtensionRange) + case 8: + b = p.appendRepeatedField(b, "oneof_decl", (*SourcePath).appendOneofDescriptorProto) + case 7: + b = p.appendSingularField(b, "options", (*SourcePath).appendMessageOptions) + case 9: + b = p.appendRepeatedField(b, "reserved_range", (*SourcePath).appendDescriptorProto_ReservedRange) + case 10: + b = p.appendRepeatedField(b, "reserved_name", nil) + } + return b +} + +func (p *SourcePath) appendEnumDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendRepeatedField(b, "value", (*SourcePath).appendEnumValueDescriptorProto) + case 3: + b = p.appendSingularField(b, "options", (*SourcePath).appendEnumOptions) + case 4: + b = p.appendRepeatedField(b, "reserved_range", (*SourcePath).appendEnumDescriptorProto_EnumReservedRange) + case 5: + b = p.appendRepeatedField(b, "reserved_name", nil) + } + return b +} + +func (p *SourcePath) appendServiceDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendRepeatedField(b, "method", (*SourcePath).appendMethodDescriptorProto) + case 3: + b = p.appendSingularField(b, "options", (*SourcePath).appendServiceOptions) + } + return b +} + +func (p *SourcePath) appendFieldDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 3: + b = p.appendSingularField(b, "number", nil) + case 4: + b = p.appendSingularField(b, "label", nil) + case 5: + b = p.appendSingularField(b, "type", nil) + case 6: + b = p.appendSingularField(b, "type_name", nil) + case 2: + b = 
p.appendSingularField(b, "extendee", nil) + case 7: + b = p.appendSingularField(b, "default_value", nil) + case 9: + b = p.appendSingularField(b, "oneof_index", nil) + case 10: + b = p.appendSingularField(b, "json_name", nil) + case 8: + b = p.appendSingularField(b, "options", (*SourcePath).appendFieldOptions) + case 17: + b = p.appendSingularField(b, "proto3_optional", nil) + } + return b +} + +func (p *SourcePath) appendFileOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "java_package", nil) + case 8: + b = p.appendSingularField(b, "java_outer_classname", nil) + case 10: + b = p.appendSingularField(b, "java_multiple_files", nil) + case 20: + b = p.appendSingularField(b, "java_generate_equals_and_hash", nil) + case 27: + b = p.appendSingularField(b, "java_string_check_utf8", nil) + case 9: + b = p.appendSingularField(b, "optimize_for", nil) + case 11: + b = p.appendSingularField(b, "go_package", nil) + case 16: + b = p.appendSingularField(b, "cc_generic_services", nil) + case 17: + b = p.appendSingularField(b, "java_generic_services", nil) + case 18: + b = p.appendSingularField(b, "py_generic_services", nil) + case 42: + b = p.appendSingularField(b, "php_generic_services", nil) + case 23: + b = p.appendSingularField(b, "deprecated", nil) + case 31: + b = p.appendSingularField(b, "cc_enable_arenas", nil) + case 36: + b = p.appendSingularField(b, "objc_class_prefix", nil) + case 37: + b = p.appendSingularField(b, "csharp_namespace", nil) + case 39: + b = p.appendSingularField(b, "swift_prefix", nil) + case 40: + b = p.appendSingularField(b, "php_class_prefix", nil) + case 41: + b = p.appendSingularField(b, "php_namespace", nil) + case 44: + b = p.appendSingularField(b, "php_metadata_namespace", nil) + case 45: + b = p.appendSingularField(b, "ruby_package", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendSourceCodeInfo(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendRepeatedField(b, "location", (*SourcePath).appendSourceCodeInfo_Location) + } + return b +} + +func (p *SourcePath) appendDescriptorProto_ExtensionRange(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "start", nil) + case 2: + b = p.appendSingularField(b, "end", nil) + case 3: + b = p.appendSingularField(b, "options", (*SourcePath).appendExtensionRangeOptions) + } + return b +} + +func (p *SourcePath) appendOneofDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendSingularField(b, "options", (*SourcePath).appendOneofOptions) + } + return b +} + +func (p *SourcePath) appendMessageOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "message_set_wire_format", nil) + case 2: + b = p.appendSingularField(b, "no_standard_descriptor_accessor", nil) + case 3: + b = p.appendSingularField(b, "deprecated", nil) + case 7: + b = p.appendSingularField(b, "map_entry", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendDescriptorProto_ReservedRange(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, 
"start", nil) + case 2: + b = p.appendSingularField(b, "end", nil) + } + return b +} + +func (p *SourcePath) appendEnumValueDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendSingularField(b, "number", nil) + case 3: + b = p.appendSingularField(b, "options", (*SourcePath).appendEnumValueOptions) + } + return b +} + +func (p *SourcePath) appendEnumOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 2: + b = p.appendSingularField(b, "allow_alias", nil) + case 3: + b = p.appendSingularField(b, "deprecated", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendEnumDescriptorProto_EnumReservedRange(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "start", nil) + case 2: + b = p.appendSingularField(b, "end", nil) + } + return b +} + +func (p *SourcePath) appendMethodDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendSingularField(b, "input_type", nil) + case 3: + b = p.appendSingularField(b, "output_type", nil) + case 4: + b = p.appendSingularField(b, "options", (*SourcePath).appendMethodOptions) + case 5: + b = p.appendSingularField(b, "client_streaming", nil) + case 6: + b = p.appendSingularField(b, "server_streaming", nil) + } + return b +} + +func (p *SourcePath) appendServiceOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 33: + b = p.appendSingularField(b, "deprecated", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendFieldOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "ctype", nil) + case 2: + b = p.appendSingularField(b, "packed", nil) + case 6: + b = p.appendSingularField(b, "jstype", nil) + case 5: + b = p.appendSingularField(b, "lazy", nil) + case 3: + b = p.appendSingularField(b, "deprecated", nil) + case 10: + b = p.appendSingularField(b, "weak", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendUninterpretedOption(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 2: + b = p.appendRepeatedField(b, "name", (*SourcePath).appendUninterpretedOption_NamePart) + case 3: + b = p.appendSingularField(b, "identifier_value", nil) + case 4: + b = p.appendSingularField(b, "positive_int_value", nil) + case 5: + b = p.appendSingularField(b, "negative_int_value", nil) + case 6: + b = p.appendSingularField(b, "double_value", nil) + case 7: + b = p.appendSingularField(b, "string_value", nil) + case 8: + b = p.appendSingularField(b, "aggregate_value", nil) + } + return b +} + +func (p *SourcePath) appendSourceCodeInfo_Location(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendRepeatedField(b, "path", nil) + case 2: + b = p.appendRepeatedField(b, "span", nil) + case 3: + b = p.appendSingularField(b, "leading_comments", nil) + case 4: + b = p.appendSingularField(b, "trailing_comments", nil) + case 6: + b = p.appendRepeatedField(b, "leading_detached_comments", nil) 
+ } + return b +} + +func (p *SourcePath) appendExtensionRangeOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendOneofOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendEnumValueOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "deprecated", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendMethodOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 33: + b = p.appendSingularField(b, "deprecated", nil) + case 34: + b = p.appendSingularField(b, "idempotency_level", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendUninterpretedOption_NamePart(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name_part", nil) + case 2: + b = p.appendSingularField(b, "is_extension", nil) + } + return b +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/type.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/type.go index 5be14a72584..8e53c44a918 100644 --- a/vendor/google.golang.org/protobuf/reflect/protoreflect/type.go +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/type.go @@ -232,11 +232,15 @@ type MessageDescriptor interface { type isMessageDescriptor interface{ ProtoType(MessageDescriptor) } // MessageType encapsulates a MessageDescriptor with a concrete Go implementation. +// It is recommended that implementations of this interface also implement the +// MessageFieldTypes interface. type MessageType interface { // New returns a newly allocated empty message. + // It may return nil for synthetic messages representing a map entry. New() Message // Zero returns an empty, read-only message. + // It may return nil for synthetic messages representing a map entry. Zero() Message // Descriptor returns the message descriptor. @@ -245,6 +249,26 @@ type MessageType interface { Descriptor() MessageDescriptor } +// MessageFieldTypes extends a MessageType by providing type information +// regarding enums and messages referenced by the message fields. +type MessageFieldTypes interface { + MessageType + + // Enum returns the EnumType for the ith field in Descriptor.Fields. + // It returns nil if the ith field is not an enum kind. + // It panics if out of bounds. + // + // Invariant: mt.Enum(i).Descriptor() == mt.Descriptor().Fields(i).Enum() + Enum(i int) EnumType + + // Message returns the MessageType for the ith field in Descriptor.Fields. + // It returns nil if the ith field is not a message or group kind. + // It panics if out of bounds. + // + // Invariant: mt.Message(i).Descriptor() == mt.Descriptor().Fields(i).Message() + Message(i int) MessageType +} + // MessageDescriptors is a list of message declarations. type MessageDescriptors interface { // Len reports the number of messages. @@ -279,8 +303,15 @@ type FieldDescriptor interface { // JSONName reports the name used for JSON serialization. 
// It is usually the camel-cased form of the field name. + // Extension fields are represented by the full name surrounded by brackets. JSONName() string + // TextName reports the name used for text serialization. + // It is usually the name of the field, except that groups use the name + // of the inlined message, and extension fields are represented by the + // full name surrounded by brackets. + TextName() string + // HasPresence reports whether the field distinguishes between unpopulated // and default values. HasPresence() bool @@ -371,6 +402,9 @@ type FieldDescriptors interface { // ByJSONName returns the FieldDescriptor for a field with s as the JSON name. // It returns nil if not found. ByJSONName(s string) FieldDescriptor + // ByTextName returns the FieldDescriptor for a field with s as the text name. + // It returns nil if not found. + ByTextName(s string) FieldDescriptor // ByNumber returns the FieldDescriptor for a field numbered n. // It returns nil if not found. ByNumber(n FieldNumber) FieldDescriptor diff --git a/vendor/google.golang.org/protobuf/reflect/protoregistry/registry.go b/vendor/google.golang.org/protobuf/reflect/protoregistry/registry.go index 5e5f9671646..be261053c1f 100644 --- a/vendor/google.golang.org/protobuf/reflect/protoregistry/registry.go +++ b/vendor/google.golang.org/protobuf/reflect/protoregistry/registry.go @@ -21,7 +21,9 @@ import ( "strings" "sync" + "google.golang.org/protobuf/internal/encoding/messageset" "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/flags" "google.golang.org/protobuf/reflect/protoreflect" ) @@ -560,13 +562,25 @@ func (r *Types) FindEnumByName(enum protoreflect.FullName) (protoreflect.EnumTyp return nil, NotFound } -// FindMessageByName looks up a message by its full name. -// E.g., "google.protobuf.Any" +// FindMessageByName looks up a message by its full name, +// e.g. "google.protobuf.Any". // -// This return (nil, NotFound) if not found. +// This returns (nil, NotFound) if not found. func (r *Types) FindMessageByName(message protoreflect.FullName) (protoreflect.MessageType, error) { - // The full name by itself is a valid URL. - return r.FindMessageByURL(string(message)) + if r == nil { + return nil, NotFound + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + if v := r.typesByName[message]; v != nil { + if mt, _ := v.(protoreflect.MessageType); mt != nil { + return mt, nil + } + return nil, errors.New("found wrong type: got %v, want message", typeName(v)) + } + return nil, NotFound } // FindMessageByURL looks up a message by a URL identifier. @@ -574,6 +588,8 @@ func (r *Types) FindMessageByName(message protoreflect.FullName) (protoreflect.M // // This returns (nil, NotFound) if not found. func (r *Types) FindMessageByURL(url string) (protoreflect.MessageType, error) { + // This function is similar to FindMessageByName but + // truncates anything before and including '/' in the URL. if r == nil { return nil, NotFound } @@ -613,6 +629,26 @@ func (r *Types) FindExtensionByName(field protoreflect.FullName) (protoreflect.E if xt, _ := v.(protoreflect.ExtensionType); xt != nil { return xt, nil } + + // MessageSet extensions are special in that the name of the extension + // is the name of the message type used to extend the MessageSet. + // This naming scheme is used by text and JSON serialization. + // + // This feature is protected by the ProtoLegacy flag since MessageSets + // are a proto1 feature that is long deprecated. 
+ if flags.ProtoLegacy { + if _, ok := v.(protoreflect.MessageType); ok { + field := field.Append(messageset.ExtensionName) + if v := r.typesByName[field]; v != nil { + if xt, _ := v.(protoreflect.ExtensionType); xt != nil { + if messageset.IsMessageSetExtension(xt.TypeDescriptor()) { + return xt, nil + } + } + } + } + } + return nil, errors.New("found wrong type: got %v, want extension", typeName(v)) } return nil, NotFound diff --git a/vendor/google.golang.org/protobuf/types/dynamicpb/dynamic.go b/vendor/google.golang.org/protobuf/types/dynamicpb/dynamic.go index 7db6e55987f..900b9d28747 100644 --- a/vendor/google.golang.org/protobuf/types/dynamicpb/dynamic.go +++ b/vendor/google.golang.org/protobuf/types/dynamicpb/dynamic.go @@ -369,6 +369,18 @@ func NewMessageType(desc pref.MessageDescriptor) pref.MessageType { func (mt messageType) New() pref.Message { return NewMessage(mt.desc) } func (mt messageType) Zero() pref.Message { return &Message{typ: messageType{mt.desc}} } func (mt messageType) Descriptor() pref.MessageDescriptor { return mt.desc } +func (mt messageType) Enum(i int) pref.EnumType { + if ed := mt.desc.Fields().Get(i).Enum(); ed != nil { + return NewEnumType(ed) + } + return nil +} +func (mt messageType) Message(i int) pref.MessageType { + if md := mt.desc.Fields().Get(i).Message(); md != nil { + return NewMessageType(md) + } + return nil +} type emptyList struct { desc pref.FieldDescriptor diff --git a/vendor/google.golang.org/protobuf/types/known/fieldmaskpb/field_mask.pb.go b/vendor/google.golang.org/protobuf/types/known/fieldmaskpb/field_mask.pb.go index 6a8d872c085..a852befef40 100644 --- a/vendor/google.golang.org/protobuf/types/known/fieldmaskpb/field_mask.pb.go +++ b/vendor/google.golang.org/protobuf/types/known/fieldmaskpb/field_mask.pb.go @@ -393,9 +393,12 @@ func numValidPaths(m proto.Message, paths []string) int { // Identify the next message to search within. md = fd.Message() // may be nil - if fd.IsMap() { - md = fd.MapValue().Message() // may be nil + + // Repeated fields are only allowed at the last postion. 
+ if fd.IsList() || fd.IsMap() { + md = nil } + return true }) { return i diff --git a/vendor/modules.txt b/vendor/modules.txt index 38c09743b9c..35f4507579a 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -115,12 +115,13 @@ github.com/inconshreveable/mousetrap github.com/influxdata/influxdb1-client/models github.com/influxdata/influxdb1-client/pkg/escape github.com/influxdata/influxdb1-client/v2 -# github.com/jhump/protoreflect v1.7.0 +# github.com/jhump/protoreflect v1.8.2 ## explicit github.com/jhump/protoreflect/codec github.com/jhump/protoreflect/desc github.com/jhump/protoreflect/desc/internal github.com/jhump/protoreflect/desc/protoparse +github.com/jhump/protoreflect/desc/protoparse/ast github.com/jhump/protoreflect/dynamic github.com/jhump/protoreflect/internal github.com/jhump/protoreflect/internal/codec @@ -345,7 +346,7 @@ google.golang.org/grpc/stats google.golang.org/grpc/status google.golang.org/grpc/tap google.golang.org/grpc/test/grpc_testing -# google.golang.org/protobuf v1.25.0 +# google.golang.org/protobuf v1.25.1-0.20200805231151-a709e31e5d12 ## explicit google.golang.org/protobuf/encoding/protojson google.golang.org/protobuf/encoding/prototext @@ -359,13 +360,12 @@ google.golang.org/protobuf/internal/encoding/messageset google.golang.org/protobuf/internal/encoding/tag google.golang.org/protobuf/internal/encoding/text google.golang.org/protobuf/internal/errors -google.golang.org/protobuf/internal/fieldsort google.golang.org/protobuf/internal/filedesc google.golang.org/protobuf/internal/filetype google.golang.org/protobuf/internal/flags google.golang.org/protobuf/internal/genid google.golang.org/protobuf/internal/impl -google.golang.org/protobuf/internal/mapsort +google.golang.org/protobuf/internal/order google.golang.org/protobuf/internal/pragma google.golang.org/protobuf/internal/set google.golang.org/protobuf/internal/strs