From 9b9e9314fb9000ccb4f2503dd778b5e05af0b337 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 8 Dec 2023 01:29:00 +0000
Subject: [PATCH] build(deps): bump github.com/kubernetes-csi/csi-lib-utils

Bumps [github.com/kubernetes-csi/csi-lib-utils](https://github.com/kubernetes-csi/csi-lib-utils) from 0.14.0 to 0.16.0.
- [Release notes](https://github.com/kubernetes-csi/csi-lib-utils/releases)
- [Commits](https://github.com/kubernetes-csi/csi-lib-utils/compare/v0.14.0...v0.16.0)

---
updated-dependencies:
- dependency-name: github.com/kubernetes-csi/csi-lib-utils
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]
---
 go.mod | 25 +-
 go.sum | 40 +-
 .../jsonreference/internal/normalize_url.go | 5 +
 .../github.com/google/gnostic-models/LICENSE | 203 +
 .../google/gnostic-models/compiler/README.md | 4 +
 .../google/gnostic-models/compiler/context.go | 49 +
 .../google/gnostic-models/compiler/error.go | 70 +
 .../gnostic-models/compiler/extensions.go | 86 +
 .../google/gnostic-models/compiler/helpers.go | 397 +
 .../google/gnostic-models/compiler/main.go | 16 +
 .../google/gnostic-models/compiler/reader.go | 307 +
 .../gnostic-models/extensions/README.md | 13 +
 .../gnostic-models/extensions/extension.pb.go | 461 +
 .../gnostic-models/extensions/extension.proto | 97 +
 .../gnostic-models/extensions/extensions.go | 64 +
 .../gnostic-models/jsonschema/README.md | 4 +
 .../google/gnostic-models/jsonschema/base.go | 97 +
 .../gnostic-models/jsonschema/display.go | 229 +
 .../gnostic-models/jsonschema/models.go | 228 +
 .../gnostic-models/jsonschema/operations.go | 394 +
 .../gnostic-models/jsonschema/reader.go | 442 +
 .../gnostic-models/jsonschema/schema.json | 150 +
 .../gnostic-models/jsonschema/writer.go | 369 +
 .../gnostic-models/openapiv2/OpenAPIv2.go | 8820 +++++++++++++++++
 .../gnostic-models/openapiv2/OpenAPIv2.pb.go | 7342 ++++++++++++++
 .../gnostic-models/openapiv2/OpenAPIv2.proto | 666 ++
 .../google/gnostic-models/openapiv2/README.md | 14 +
 .../gnostic-models/openapiv2/document.go | 42 +
 .../gnostic-models/openapiv2/openapi-2.0.json | 1610 +++
 .../openapiv3/OpenAPIv3.go | 9 +-
 .../openapiv3/OpenAPIv3.pb.go | 13 +-
 .../openapiv3/OpenAPIv3.proto | 2 +-
 .../openapiv3/README.md | 4 -
 .../openapiv3/document.go | 2 +-
 .../gnostic/openapiv3/annotations.pb.go | 183 -
 .../gnostic/openapiv3/annotations.proto | 60 -
 .../google/gnostic/openapiv3/openapi-3.0.json | 1251 ---
 .../google/gnostic/openapiv3/openapi-3.1.json | 1250 ---
 .../protosanitizer/protosanitizer.go | 9 +-
 .../collectors/go_collector_latest.go | 2 +
 .../client_golang/prometheus/counter.go | 26 +-
 .../client_golang/prometheus/desc.go | 46 +-
 .../client_golang/prometheus/doc.go | 44 +-
 .../client_golang/prometheus/gauge.go | 26 +-
 .../prometheus/go_collector_latest.go | 7 +-
 .../client_golang/prometheus/histogram.go | 61 +-
 .../client_golang/prometheus/labels.go | 72 +
 .../client_golang/prometheus/metric.go | 6 +-
 .../client_golang/prometheus/promhttp/http.go | 19 +-
 .../prometheus/promhttp/instrument_client.go | 26 +-
 .../prometheus/promhttp/instrument_server.go | 101 +-
 .../prometheus/promhttp/option.go | 38 +-
 .../client_golang/prometheus/registry.go | 17 +-
 .../client_golang/prometheus/summary.go | 39 +-
 .../client_golang/prometheus/timer.go | 28 +-
 .../client_golang/prometheus/value.go | 10 +-
 .../client_golang/prometheus/vec.go | 79 +-
 .../client_golang/prometheus/vnext.go | 23 +
 .../client_golang/prometheus/wrap.go | 8 +-
.../prometheus/client_model/go/metrics.pb.go | 1530 +-- .../prometheus/common/expfmt/decode.go | 39 +- .../prometheus/common/expfmt/encode.go | 13 +- .../prometheus/common/expfmt/expfmt.go | 26 +- .../prometheus/common/expfmt/fuzz.go | 4 +- .../common/expfmt/openmetrics_create.go | 22 +- .../prometheus/common/expfmt/text_parse.go | 12 +- .../bitbucket.org/ww/goautoneg/autoneg.go | 22 +- .../prometheus/common/model/time.go | 89 +- .../prometheus/common/model/value.go | 246 +- .../prometheus/common/model/value_float.go | 100 + .../common/model/value_histogram.go | 178 + .../prometheus/common/model/value_type.go | 83 + .../prometheus/procfs/Makefile.common | 23 +- .../github.com/prometheus/procfs/cpuinfo.go | 36 + .../prometheus/procfs/cpuinfo_loong64.go | 19 + .../prometheus/procfs/cpuinfo_others.go | 4 +- vendor/github.com/prometheus/procfs/doc.go | 51 +- vendor/github.com/prometheus/procfs/fs.go | 9 +- .../prometheus/procfs/fs_statfs_notype.go | 23 + .../prometheus/procfs/fs_statfs_type.go | 33 + .../prometheus/procfs/internal/util/parse.go | 15 + .../prometheus/procfs/mountstats.go | 9 +- .../prometheus/procfs/net_conntrackstat.go | 88 +- .../prometheus/procfs/net_softnet.go | 75 +- .../prometheus/procfs/net_wireless.go | 182 + .../github.com/prometheus/procfs/netstat.go | 54 +- vendor/github.com/prometheus/procfs/proc.go | 22 +- .../prometheus/procfs/proc_cgroup.go | 2 +- .../prometheus/procfs/proc_interrupts.go | 98 + .../prometheus/procfs/proc_netstat.go | 491 +- .../github.com/prometheus/procfs/proc_snmp.go | 318 +- .../prometheus/procfs/proc_snmp6.go | 364 +- .../github.com/prometheus/procfs/proc_stat.go | 10 +- .../prometheus/procfs/proc_status.go | 38 +- vendor/github.com/prometheus/procfs/stat.go | 22 +- vendor/github.com/prometheus/procfs/thread.go | 80 + vendor/github.com/prometheus/procfs/vm.go | 4 +- .../k8s.io/kube-openapi/pkg/cached/cache.go | 166 +- .../kube-openapi/pkg/handler3/handler.go | 2 +- .../kube-openapi/pkg/util/proto/document.go | 2 +- .../pkg/util/proto/document_v3.go | 2 +- .../pkg/validation/spec/gnostic.go | 2 +- vendor/k8s.io/utils/nsenter/nsenter.go | 27 +- vendor/k8s.io/utils/trace/trace.go | 19 + vendor/modules.txt | 40 +- 105 files changed, 25784 insertions(+), 4615 deletions(-) create mode 100644 vendor/github.com/google/gnostic-models/LICENSE create mode 100644 vendor/github.com/google/gnostic-models/compiler/README.md create mode 100644 vendor/github.com/google/gnostic-models/compiler/context.go create mode 100644 vendor/github.com/google/gnostic-models/compiler/error.go create mode 100644 vendor/github.com/google/gnostic-models/compiler/extensions.go create mode 100644 vendor/github.com/google/gnostic-models/compiler/helpers.go create mode 100644 vendor/github.com/google/gnostic-models/compiler/main.go create mode 100644 vendor/github.com/google/gnostic-models/compiler/reader.go create mode 100644 vendor/github.com/google/gnostic-models/extensions/README.md create mode 100644 vendor/github.com/google/gnostic-models/extensions/extension.pb.go create mode 100644 vendor/github.com/google/gnostic-models/extensions/extension.proto create mode 100644 vendor/github.com/google/gnostic-models/extensions/extensions.go create mode 100644 vendor/github.com/google/gnostic-models/jsonschema/README.md create mode 100644 vendor/github.com/google/gnostic-models/jsonschema/base.go create mode 100644 vendor/github.com/google/gnostic-models/jsonschema/display.go create mode 100644 vendor/github.com/google/gnostic-models/jsonschema/models.go create mode 100644 
vendor/github.com/google/gnostic-models/jsonschema/operations.go create mode 100644 vendor/github.com/google/gnostic-models/jsonschema/reader.go create mode 100644 vendor/github.com/google/gnostic-models/jsonschema/schema.json create mode 100644 vendor/github.com/google/gnostic-models/jsonschema/writer.go create mode 100644 vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.go create mode 100644 vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.pb.go create mode 100644 vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.proto create mode 100644 vendor/github.com/google/gnostic-models/openapiv2/README.md create mode 100644 vendor/github.com/google/gnostic-models/openapiv2/document.go create mode 100644 vendor/github.com/google/gnostic-models/openapiv2/openapi-2.0.json rename vendor/github.com/google/{gnostic => gnostic-models}/openapiv3/OpenAPIv3.go (99%) rename vendor/github.com/google/{gnostic => gnostic-models}/openapiv3/OpenAPIv3.pb.go (99%) rename vendor/github.com/google/{gnostic => gnostic-models}/openapiv3/OpenAPIv3.proto (99%) rename vendor/github.com/google/{gnostic => gnostic-models}/openapiv3/README.md (89%) rename vendor/github.com/google/{gnostic => gnostic-models}/openapiv3/document.go (96%) delete mode 100644 vendor/github.com/google/gnostic/openapiv3/annotations.pb.go delete mode 100644 vendor/github.com/google/gnostic/openapiv3/annotations.proto delete mode 100644 vendor/github.com/google/gnostic/openapiv3/openapi-3.0.json delete mode 100644 vendor/github.com/google/gnostic/openapiv3/openapi-3.1.json create mode 100644 vendor/github.com/prometheus/client_golang/prometheus/vnext.go create mode 100644 vendor/github.com/prometheus/common/model/value_float.go create mode 100644 vendor/github.com/prometheus/common/model/value_histogram.go create mode 100644 vendor/github.com/prometheus/common/model/value_type.go create mode 100644 vendor/github.com/prometheus/procfs/cpuinfo_loong64.go create mode 100644 vendor/github.com/prometheus/procfs/fs_statfs_notype.go create mode 100644 vendor/github.com/prometheus/procfs/fs_statfs_type.go create mode 100644 vendor/github.com/prometheus/procfs/net_wireless.go create mode 100644 vendor/github.com/prometheus/procfs/proc_interrupts.go create mode 100644 vendor/github.com/prometheus/procfs/thread.go diff --git a/go.mod b/go.mod index 8daeb697..45135658 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.19 require ( github.com/container-storage-interface/spec v1.8.0 - github.com/kubernetes-csi/csi-lib-utils v0.14.0 + github.com/kubernetes-csi/csi-lib-utils v0.16.0 golang.org/x/net v0.19.0 google.golang.org/grpc v1.59.0 k8s.io/klog/v2 v2.110.1 @@ -20,12 +20,13 @@ require ( github.com/emicklei/go-restful/v3 v3.10.1 // indirect github.com/go-logr/logr v1.3.0 // indirect github.com/go-openapi/jsonpointer v0.19.6 // indirect - github.com/go-openapi/jsonreference v0.20.1 // indirect + github.com/go-openapi/jsonreference v0.20.2 // indirect github.com/go-openapi/swag v0.22.3 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/gnostic v0.6.9 // indirect + github.com/google/gnostic-models v0.6.8 // indirect github.com/google/go-cmp v0.5.9 // indirect github.com/google/gofuzz v1.2.0 // indirect github.com/google/uuid v1.3.1 // indirect @@ -38,10 +39,10 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // 
indirect github.com/opencontainers/selinux v1.10.2 // indirect - github.com/prometheus/client_golang v1.14.0 // indirect - github.com/prometheus/client_model v0.3.0 // indirect - github.com/prometheus/common v0.38.0 // indirect - github.com/prometheus/procfs v0.8.0 // indirect + github.com/prometheus/client_golang v1.16.0 // indirect + github.com/prometheus/client_model v0.4.0 // indirect + github.com/prometheus/common v0.44.0 // indirect + github.com/prometheus/procfs v0.10.1 // indirect github.com/spf13/pflag v1.0.5 // indirect golang.org/x/oauth2 v0.11.0 // indirect golang.org/x/sys v0.15.0 // indirect @@ -54,14 +55,14 @@ require ( gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - k8s.io/api v0.27.0 // indirect - k8s.io/apimachinery v0.27.0 // indirect + k8s.io/api v0.28.0 // indirect + k8s.io/apimachinery v0.28.0 // indirect k8s.io/apiserver v0.26.11 // indirect - k8s.io/client-go v0.27.0 // indirect + k8s.io/client-go v0.28.0 // indirect k8s.io/cloud-provider v0.26.11 // indirect - k8s.io/component-base v0.26.11 // indirect + k8s.io/component-base v0.28.0 // indirect k8s.io/component-helpers v0.26.11 // indirect - k8s.io/kube-openapi v0.0.0-20230308215209-15aac26d736a // indirect + k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9 // indirect sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect sigs.k8s.io/yaml v1.3.0 // indirect @@ -69,7 +70,7 @@ require ( require ( github.com/kubernetes-csi/csi-lib-iscsi v0.0.0-20230429162816-0798dedca439 - k8s.io/utils v0.0.0-20230209194617-a36077c30491 + k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 ) replace k8s.io/api => k8s.io/api v0.26.11 diff --git a/go.sum b/go.sum index 0159d701..d9973aaa 100644 --- a/go.sum +++ b/go.sum @@ -39,8 +39,8 @@ github.com/go-logr/logr v1.3.0 h1:2y3SDp0ZXuc6/cjLSZ+Q3ir+QB9T/iG5yYRXqsagWSY= github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= -github.com/go-openapi/jsonreference v0.20.1 h1:FBLnyygC4/IZZr893oiomc9XaghoveYTrLC1F86HID8= -github.com/go-openapi/jsonreference v0.20.1/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= +github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE= +github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= @@ -53,7 +53,6 @@ github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf 
v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -68,6 +67,8 @@ github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/google/gnostic v0.6.9 h1:ZK/5VhkoX835RikCHpSUJV9a+S3e1zLh59YnyWeBW+0= github.com/google/gnostic v0.6.9/go.mod h1:Nm8234We1lq6iB9OmlgNv3nH91XLLVZHCDayfA3xq+E= +github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= +github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -90,16 +91,16 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kubernetes-csi/csi-lib-iscsi v0.0.0-20230429162816-0798dedca439 h1:aXjlx9Cod1FbhrpJ1pE5QrU3yAY4ECKCzE/PwdVziEY= github.com/kubernetes-csi/csi-lib-iscsi v0.0.0-20230429162816-0798dedca439/go.mod h1:vXHOVlFzOI1dgPszmjkUbwHd+/Pbe9mEg55vvIM4tjI= -github.com/kubernetes-csi/csi-lib-utils v0.14.0 h1:pusB32LkSd7GhuT8Z6cyRFqByujc28ygWV97ndaT19s= -github.com/kubernetes-csi/csi-lib-utils v0.14.0/go.mod h1:uX8xidqxGJOLXtsfCCVsxWtZl/9NiLyd2DD3Nb+KoP4= +github.com/kubernetes-csi/csi-lib-utils v0.16.0 h1:LXCvkhXHtFOkl7LoDqFdho/MuebccZqWxLwhKiRGiBg= +github.com/kubernetes-csi/csi-lib-utils v0.16.0/go.mod h1:fp1Oik+45tP2o4X9SD/SBWXLTQYT9wtLxGasBE3+vBI= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= @@ -122,16 +123,17 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= -github.com/prometheus/client_golang v1.14.0 h1:nJdhIvne2eSX/XRAFV9PcvFFRbrjbcTUj0VP62TMhnw= -github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y= +github.com/prometheus/client_golang v1.16.0 h1:yk/hx9hDbrGHovbci4BY+pRMfSuuat626eFsHb7tmT8= +github.com/prometheus/client_golang v1.16.0/go.mod h1:Zsulrv/L9oM40tJ7T815tM89lFEugiJ9HzIqaAx4LKc= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod 
h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= -github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= -github.com/prometheus/common v0.38.0 h1:VTQitp6mXTdUoCmDMugDVOJ1opi6ADftKfp/yeqTR/E= -github.com/prometheus/common v0.38.0/go.mod h1:MBXfmBQZrK5XpbCkjofnXs96LD2QQ7fEq4C0xjC/yec= -github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= -github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= +github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY= +github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= +github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY= +github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= +github.com/prometheus/procfs v0.10.1 h1:kYK1Va/YMlutzCGazswoHKo//tZVlFpKYh+PymziUAg= +github.com/prometheus/procfs v0.10.1/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= @@ -144,7 +146,7 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5 github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= @@ -290,14 +292,14 @@ k8s.io/component-helpers v0.26.11/go.mod h1:lw3bchkI0NHMPmb+CE73GznPW0Mvqd/Y9UVM k8s.io/csi-translation-lib v0.26.11 h1:mYjelMXM5x4RtPPc4ULoy2RknlFCV2nwpXbThkY4K0c= k8s.io/klog/v2 v2.110.1 h1:U/Af64HJf7FcwMcXyKm2RPM22WZzyR7OSpYj5tg3cL0= k8s.io/klog/v2 v2.110.1/go.mod h1:YGtd1984u+GgbuZ7e08/yBuAfKLSO0+uR1Fhi6ExXjo= -k8s.io/kube-openapi v0.0.0-20230308215209-15aac26d736a h1:gmovKNur38vgoWfGtP5QOGNOA7ki4n6qNYoFAgMlNvg= -k8s.io/kube-openapi v0.0.0-20230308215209-15aac26d736a/go.mod h1:y5VtZWM9sHHc2ZodIH/6SHzXj+TPU5USoA8lcIeKEKY= +k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9 h1:LyMgNKD2P8Wn1iAwQU5OhxCKlKJy0sHc+PcDwFB24dQ= +k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9/go.mod h1:wZK2AVp1uHCp4VamDVgBP2COHZjqD1T68Rf0CM3YjSM= k8s.io/kubernetes v1.26.11 h1:g3r1IAUqsaHnOG2jdpoagJ5W9UCXkR2ljQ/7BmCzPNg= k8s.io/kubernetes v1.26.11/go.mod h1:z1URAaBJ+XnOTr3Q/l4umxRUxn/OyD2fbkUgS0Bl7u4= k8s.io/mount-utils v0.26.11 h1:wt0TyLv1YhRAxHvQB6w3GikdeHnMm1hpwzOVLweRGyI= k8s.io/mount-utils v0.26.11/go.mod 
h1:huSg2NI5P8ZNfE8PkQmm5a9fFZ9iHCXFxP/rasMCgYA= -k8s.io/utils v0.0.0-20230209194617-a36077c30491 h1:r0BAOLElQnnFhE/ApUsg3iHdVYYPBjNSSOMowRZxxsY= -k8s.io/utils v0.0.0-20230209194617-a36077c30491/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 h1:qY1Ad8PODbnymg2pRbkyMT/ylpTrCM8P2RJ0yroCyIk= +k8s.io/utils v0.0.0-20230406110748-d93618cff8a2/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= diff --git a/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go b/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go index fb376fce..f0610cf1 100644 --- a/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go +++ b/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go @@ -26,11 +26,16 @@ var rxDupSlashes = regexp.MustCompile(`/{2,}`) // - FlagLowercaseHost // - FlagRemoveDefaultPort // - FlagRemoveDuplicateSlashes (and this was mixed in with the |) +// +// This also normalizes the URL into its urlencoded form by removing RawPath and RawFragment. func NormalizeURL(u *url.URL) { lowercaseScheme(u) lowercaseHost(u) removeDefaultPort(u) removeDuplicateSlashes(u) + + u.RawPath = "" + u.RawFragment = "" } func lowercaseScheme(u *url.URL) { diff --git a/vendor/github.com/google/gnostic-models/LICENSE b/vendor/github.com/google/gnostic-models/LICENSE new file mode 100644 index 00000000..6b0b1270 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/LICENSE @@ -0,0 +1,203 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/vendor/github.com/google/gnostic-models/compiler/README.md b/vendor/github.com/google/gnostic-models/compiler/README.md new file mode 100644 index 00000000..ee9783d2 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/README.md @@ -0,0 +1,4 @@ +# Compiler support code + +This directory contains compiler support code used by Gnostic and Gnostic +extensions. diff --git a/vendor/github.com/google/gnostic-models/compiler/context.go b/vendor/github.com/google/gnostic-models/compiler/context.go new file mode 100644 index 00000000..1bfe9612 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/context.go @@ -0,0 +1,49 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + yaml "gopkg.in/yaml.v3" +) + +// Context contains state of the compiler as it traverses a document. +type Context struct { + Parent *Context + Name string + Node *yaml.Node + ExtensionHandlers *[]ExtensionHandler +} + +// NewContextWithExtensions returns a new object representing the compiler state +func NewContextWithExtensions(name string, node *yaml.Node, parent *Context, extensionHandlers *[]ExtensionHandler) *Context { + return &Context{Name: name, Node: node, Parent: parent, ExtensionHandlers: extensionHandlers} +} + +// NewContext returns a new object representing the compiler state +func NewContext(name string, node *yaml.Node, parent *Context) *Context { + if parent != nil { + return &Context{Name: name, Node: node, Parent: parent, ExtensionHandlers: parent.ExtensionHandlers} + } + return &Context{Name: name, Parent: parent, ExtensionHandlers: nil} +} + +// Description returns a text description of the compiler state +func (context *Context) Description() string { + name := context.Name + if context.Parent != nil { + name = context.Parent.Description() + "." + name + } + return name +} diff --git a/vendor/github.com/google/gnostic-models/compiler/error.go b/vendor/github.com/google/gnostic-models/compiler/error.go new file mode 100644 index 00000000..6f40515d --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/error.go @@ -0,0 +1,70 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import "fmt" + +// Error represents compiler errors and their location in the document. +type Error struct { + Context *Context + Message string +} + +// NewError creates an Error. +func NewError(context *Context, message string) *Error { + return &Error{Context: context, Message: message} +} + +func (err *Error) locationDescription() string { + if err.Context.Node != nil { + return fmt.Sprintf("[%d,%d] %s", err.Context.Node.Line, err.Context.Node.Column, err.Context.Description()) + } + return err.Context.Description() +} + +// Error returns the string value of an Error. +func (err *Error) Error() string { + if err.Context == nil { + return err.Message + } + return err.locationDescription() + " " + err.Message +} + +// ErrorGroup is a container for groups of Error values. +type ErrorGroup struct { + Errors []error +} + +// NewErrorGroupOrNil returns a new ErrorGroup for a slice of errors or nil if the slice is empty. 
+func NewErrorGroupOrNil(errors []error) error { + if len(errors) == 0 { + return nil + } else if len(errors) == 1 { + return errors[0] + } else { + return &ErrorGroup{Errors: errors} + } +} + +func (group *ErrorGroup) Error() string { + result := "" + for i, err := range group.Errors { + if i > 0 { + result += "\n" + } + result += err.Error() + } + return result +} diff --git a/vendor/github.com/google/gnostic-models/compiler/extensions.go b/vendor/github.com/google/gnostic-models/compiler/extensions.go new file mode 100644 index 00000000..250c81e8 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/extensions.go @@ -0,0 +1,86 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "bytes" + "fmt" + "os/exec" + "strings" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes/any" + yaml "gopkg.in/yaml.v3" + + extensions "github.com/google/gnostic-models/extensions" +) + +// ExtensionHandler describes a binary that is called by the compiler to handle specification extensions. +type ExtensionHandler struct { + Name string +} + +// CallExtension calls a binary extension handler. +func CallExtension(context *Context, in *yaml.Node, extensionName string) (handled bool, response *any.Any, err error) { + if context == nil || context.ExtensionHandlers == nil { + return false, nil, nil + } + handled = false + for _, handler := range *(context.ExtensionHandlers) { + response, err = handler.handle(in, extensionName) + if response == nil { + continue + } else { + handled = true + break + } + } + return handled, response, err +} + +func (extensionHandlers *ExtensionHandler) handle(in *yaml.Node, extensionName string) (*any.Any, error) { + if extensionHandlers.Name != "" { + yamlData, _ := yaml.Marshal(in) + request := &extensions.ExtensionHandlerRequest{ + CompilerVersion: &extensions.Version{ + Major: 0, + Minor: 1, + Patch: 0, + }, + Wrapper: &extensions.Wrapper{ + Version: "unknown", // TODO: set this to the type/version of spec being parsed. + Yaml: string(yamlData), + ExtensionName: extensionName, + }, + } + requestBytes, _ := proto.Marshal(request) + cmd := exec.Command(extensionHandlers.Name) + cmd.Stdin = bytes.NewReader(requestBytes) + output, err := cmd.Output() + if err != nil { + return nil, err + } + response := &extensions.ExtensionHandlerResponse{} + err = proto.Unmarshal(output, response) + if err != nil || !response.Handled { + return nil, err + } + if len(response.Errors) != 0 { + return nil, fmt.Errorf("Errors when parsing: %+v for field %s by vendor extension handler %s. 
Details %+v", in, extensionName, extensionHandlers.Name, strings.Join(response.Errors, ",")) + } + return response.Value, nil + } + return nil, nil +} diff --git a/vendor/github.com/google/gnostic-models/compiler/helpers.go b/vendor/github.com/google/gnostic-models/compiler/helpers.go new file mode 100644 index 00000000..975d65e8 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/helpers.go @@ -0,0 +1,397 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "fmt" + "regexp" + "sort" + "strconv" + + "gopkg.in/yaml.v3" + + "github.com/google/gnostic-models/jsonschema" +) + +// compiler helper functions, usually called from generated code + +// UnpackMap gets a *yaml.Node if possible. +func UnpackMap(in *yaml.Node) (*yaml.Node, bool) { + if in == nil { + return nil, false + } + return in, true +} + +// SortedKeysForMap returns the sorted keys of a yamlv2.MapSlice. +func SortedKeysForMap(m *yaml.Node) []string { + keys := make([]string, 0) + if m.Kind == yaml.MappingNode { + for i := 0; i < len(m.Content); i += 2 { + keys = append(keys, m.Content[i].Value) + } + } + sort.Strings(keys) + return keys +} + +// MapHasKey returns true if a yamlv2.MapSlice contains a specified key. +func MapHasKey(m *yaml.Node, key string) bool { + if m == nil { + return false + } + if m.Kind == yaml.MappingNode { + for i := 0; i < len(m.Content); i += 2 { + itemKey := m.Content[i].Value + if key == itemKey { + return true + } + } + } + return false +} + +// MapValueForKey gets the value of a map value for a specified key. +func MapValueForKey(m *yaml.Node, key string) *yaml.Node { + if m == nil { + return nil + } + if m.Kind == yaml.MappingNode { + for i := 0; i < len(m.Content); i += 2 { + itemKey := m.Content[i].Value + if key == itemKey { + return m.Content[i+1] + } + } + } + return nil +} + +// ConvertInterfaceArrayToStringArray converts an array of interfaces to an array of strings, if possible. +func ConvertInterfaceArrayToStringArray(interfaceArray []interface{}) []string { + stringArray := make([]string, 0) + for _, item := range interfaceArray { + v, ok := item.(string) + if ok { + stringArray = append(stringArray, v) + } + } + return stringArray +} + +// SequenceNodeForNode returns a node if it is a SequenceNode. +func SequenceNodeForNode(node *yaml.Node) (*yaml.Node, bool) { + if node.Kind != yaml.SequenceNode { + return nil, false + } + return node, true +} + +// BoolForScalarNode returns the bool value of a node. +func BoolForScalarNode(node *yaml.Node) (bool, bool) { + if node == nil { + return false, false + } + if node.Kind == yaml.DocumentNode { + return BoolForScalarNode(node.Content[0]) + } + if node.Kind != yaml.ScalarNode { + return false, false + } + if node.Tag != "!!bool" { + return false, false + } + v, err := strconv.ParseBool(node.Value) + if err != nil { + return false, false + } + return v, true +} + +// IntForScalarNode returns the integer value of a node. 
+func IntForScalarNode(node *yaml.Node) (int64, bool) { + if node == nil { + return 0, false + } + if node.Kind == yaml.DocumentNode { + return IntForScalarNode(node.Content[0]) + } + if node.Kind != yaml.ScalarNode { + return 0, false + } + if node.Tag != "!!int" { + return 0, false + } + v, err := strconv.ParseInt(node.Value, 10, 64) + if err != nil { + return 0, false + } + return v, true +} + +// FloatForScalarNode returns the float value of a node. +func FloatForScalarNode(node *yaml.Node) (float64, bool) { + if node == nil { + return 0.0, false + } + if node.Kind == yaml.DocumentNode { + return FloatForScalarNode(node.Content[0]) + } + if node.Kind != yaml.ScalarNode { + return 0.0, false + } + if (node.Tag != "!!int") && (node.Tag != "!!float") { + return 0.0, false + } + v, err := strconv.ParseFloat(node.Value, 64) + if err != nil { + return 0.0, false + } + return v, true +} + +// StringForScalarNode returns the string value of a node. +func StringForScalarNode(node *yaml.Node) (string, bool) { + if node == nil { + return "", false + } + if node.Kind == yaml.DocumentNode { + return StringForScalarNode(node.Content[0]) + } + switch node.Kind { + case yaml.ScalarNode: + switch node.Tag { + case "!!int": + return node.Value, true + case "!!str": + return node.Value, true + case "!!timestamp": + return node.Value, true + case "!!null": + return "", true + default: + return "", false + } + default: + return "", false + } +} + +// StringArrayForSequenceNode converts a sequence node to an array of strings, if possible. +func StringArrayForSequenceNode(node *yaml.Node) []string { + stringArray := make([]string, 0) + for _, item := range node.Content { + v, ok := StringForScalarNode(item) + if ok { + stringArray = append(stringArray, v) + } + } + return stringArray +} + +// MissingKeysInMap identifies which keys from a list of required keys are not in a map. +func MissingKeysInMap(m *yaml.Node, requiredKeys []string) []string { + missingKeys := make([]string, 0) + for _, k := range requiredKeys { + if !MapHasKey(m, k) { + missingKeys = append(missingKeys, k) + } + } + return missingKeys +} + +// InvalidKeysInMap returns keys in a map that don't match a list of allowed keys and patterns. +func InvalidKeysInMap(m *yaml.Node, allowedKeys []string, allowedPatterns []*regexp.Regexp) []string { + invalidKeys := make([]string, 0) + if m == nil || m.Kind != yaml.MappingNode { + return invalidKeys + } + for i := 0; i < len(m.Content); i += 2 { + key := m.Content[i].Value + found := false + // does the key match an allowed key? + for _, allowedKey := range allowedKeys { + if key == allowedKey { + found = true + break + } + } + if !found { + // does the key match an allowed pattern? + for _, allowedPattern := range allowedPatterns { + if allowedPattern.MatchString(key) { + found = true + break + } + } + if !found { + invalidKeys = append(invalidKeys, key) + } + } + } + return invalidKeys +} + +// NewNullNode creates a new Null node. +func NewNullNode() *yaml.Node { + node := &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!null", + } + return node +} + +// NewMappingNode creates a new Mapping node. +func NewMappingNode() *yaml.Node { + return &yaml.Node{ + Kind: yaml.MappingNode, + Content: make([]*yaml.Node, 0), + } +} + +// NewSequenceNode creates a new Sequence node. +func NewSequenceNode() *yaml.Node { + node := &yaml.Node{ + Kind: yaml.SequenceNode, + Content: make([]*yaml.Node, 0), + } + return node +} + +// NewScalarNodeForString creates a new node to hold a string. 
+func NewScalarNodeForString(s string) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: s, + } +} + +// NewSequenceNodeForStringArray creates a new node to hold an array of strings. +func NewSequenceNodeForStringArray(strings []string) *yaml.Node { + node := &yaml.Node{ + Kind: yaml.SequenceNode, + Content: make([]*yaml.Node, 0), + } + for _, s := range strings { + node.Content = append(node.Content, NewScalarNodeForString(s)) + } + return node +} + +// NewScalarNodeForBool creates a new node to hold a bool. +func NewScalarNodeForBool(b bool) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!bool", + Value: fmt.Sprintf("%t", b), + } +} + +// NewScalarNodeForFloat creates a new node to hold a float. +func NewScalarNodeForFloat(f float64) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!float", + Value: fmt.Sprintf("%g", f), + } +} + +// NewScalarNodeForInt creates a new node to hold an integer. +func NewScalarNodeForInt(i int64) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!int", + Value: fmt.Sprintf("%d", i), + } +} + +// PluralProperties returns the string "properties" pluralized. +func PluralProperties(count int) string { + if count == 1 { + return "property" + } + return "properties" +} + +// StringArrayContainsValue returns true if a string array contains a specified value. +func StringArrayContainsValue(array []string, value string) bool { + for _, item := range array { + if item == value { + return true + } + } + return false +} + +// StringArrayContainsValues returns true if a string array contains all of a list of specified values. +func StringArrayContainsValues(array []string, values []string) bool { + for _, value := range values { + if !StringArrayContainsValue(array, value) { + return false + } + } + return true +} + +// StringValue returns the string value of an item. +func StringValue(item interface{}) (value string, ok bool) { + value, ok = item.(string) + if ok { + return value, ok + } + intValue, ok := item.(int) + if ok { + return strconv.Itoa(intValue), true + } + return "", false +} + +// Description returns a human-readable represention of an item. +func Description(item interface{}) string { + value, ok := item.(*yaml.Node) + if ok { + return jsonschema.Render(value) + } + return fmt.Sprintf("%+v", item) +} + +// Display returns a description of a node for use in error messages. +func Display(node *yaml.Node) string { + switch node.Kind { + case yaml.ScalarNode: + switch node.Tag { + case "!!str": + return fmt.Sprintf("%s (string)", node.Value) + } + } + return fmt.Sprintf("%+v (%T)", node, node) +} + +// Marshal creates a yaml version of a structure in our preferred style +func Marshal(in *yaml.Node) []byte { + clearStyle(in) + //bytes, _ := yaml.Marshal(&yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{in}}) + bytes, _ := yaml.Marshal(in) + + return bytes +} + +func clearStyle(node *yaml.Node) { + node.Style = 0 + for _, c := range node.Content { + clearStyle(c) + } +} diff --git a/vendor/github.com/google/gnostic-models/compiler/main.go b/vendor/github.com/google/gnostic-models/compiler/main.go new file mode 100644 index 00000000..ce9fcc45 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/main.go @@ -0,0 +1,16 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package compiler provides support functions to generated compiler code. +package compiler diff --git a/vendor/github.com/google/gnostic-models/compiler/reader.go b/vendor/github.com/google/gnostic-models/compiler/reader.go new file mode 100644 index 00000000..be0e8b40 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/compiler/reader.go @@ -0,0 +1,307 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package compiler + +import ( + "fmt" + "io/ioutil" + "log" + "net/http" + "net/url" + "path/filepath" + "strings" + "sync" + + yaml "gopkg.in/yaml.v3" +) + +var verboseReader = false + +var fileCache map[string][]byte +var infoCache map[string]*yaml.Node + +var fileCacheEnable = true +var infoCacheEnable = true + +// These locks are used to synchronize accesses to the fileCache and infoCache +// maps (above). They are global state and can throw thread-related errors +// when modified from separate goroutines. The general strategy is to protect +// all public functions in this file with mutex Lock() calls. As a result, to +// avoid deadlock, these public functions should not call other public +// functions, so some public functions have private equivalents. +// In the future, we might consider replacing the maps with sync.Map and +// eliminating these mutexes. +var fileCacheMutex sync.Mutex +var infoCacheMutex sync.Mutex + +func initializeFileCache() { + if fileCache == nil { + fileCache = make(map[string][]byte, 0) + } +} + +func initializeInfoCache() { + if infoCache == nil { + infoCache = make(map[string]*yaml.Node, 0) + } +} + +// EnableFileCache turns on file caching. +func EnableFileCache() { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + fileCacheEnable = true +} + +// EnableInfoCache turns on parsed info caching. +func EnableInfoCache() { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + infoCacheEnable = true +} + +// DisableFileCache turns off file caching. +func DisableFileCache() { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + fileCacheEnable = false +} + +// DisableInfoCache turns off parsed info caching. +func DisableInfoCache() { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + infoCacheEnable = false +} + +// RemoveFromFileCache removes an entry from the file cache. +func RemoveFromFileCache(fileurl string) { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + if !fileCacheEnable { + return + } + initializeFileCache() + delete(fileCache, fileurl) +} + +// RemoveFromInfoCache removes an entry from the info cache. 
+func RemoveFromInfoCache(filename string) { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + if !infoCacheEnable { + return + } + initializeInfoCache() + delete(infoCache, filename) +} + +// GetInfoCache returns the info cache map. +func GetInfoCache() map[string]*yaml.Node { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + if infoCache == nil { + initializeInfoCache() + } + return infoCache +} + +// ClearFileCache clears the file cache. +func ClearFileCache() { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + fileCache = make(map[string][]byte, 0) +} + +// ClearInfoCache clears the info cache. +func ClearInfoCache() { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + infoCache = make(map[string]*yaml.Node) +} + +// ClearCaches clears all caches. +func ClearCaches() { + ClearFileCache() + ClearInfoCache() +} + +// FetchFile gets a specified file from the local filesystem or a remote location. +func FetchFile(fileurl string) ([]byte, error) { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + return fetchFile(fileurl) +} + +func fetchFile(fileurl string) ([]byte, error) { + var bytes []byte + initializeFileCache() + if fileCacheEnable { + bytes, ok := fileCache[fileurl] + if ok { + if verboseReader { + log.Printf("Cache hit %s", fileurl) + } + return bytes, nil + } + if verboseReader { + log.Printf("Fetching %s", fileurl) + } + } + response, err := http.Get(fileurl) + if err != nil { + return nil, err + } + defer response.Body.Close() + if response.StatusCode != 200 { + return nil, fmt.Errorf("Error downloading %s: %s", fileurl, response.Status) + } + bytes, err = ioutil.ReadAll(response.Body) + if fileCacheEnable && err == nil { + fileCache[fileurl] = bytes + } + return bytes, err +} + +// ReadBytesForFile reads the bytes of a file. +func ReadBytesForFile(filename string) ([]byte, error) { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + return readBytesForFile(filename) +} + +func readBytesForFile(filename string) ([]byte, error) { + // is the filename a url? + fileurl, _ := url.Parse(filename) + if fileurl.Scheme != "" { + // yes, fetch it + bytes, err := fetchFile(filename) + if err != nil { + return nil, err + } + return bytes, nil + } + // no, it's a local filename + bytes, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + return bytes, nil +} + +// ReadInfoFromBytes unmarshals a file as a *yaml.Node. +func ReadInfoFromBytes(filename string, bytes []byte) (*yaml.Node, error) { + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + return readInfoFromBytes(filename, bytes) +} + +func readInfoFromBytes(filename string, bytes []byte) (*yaml.Node, error) { + initializeInfoCache() + if infoCacheEnable { + cachedInfo, ok := infoCache[filename] + if ok { + if verboseReader { + log.Printf("Cache hit info for file %s", filename) + } + return cachedInfo, nil + } + if verboseReader { + log.Printf("Reading info for file %s", filename) + } + } + var info yaml.Node + err := yaml.Unmarshal(bytes, &info) + if err != nil { + return nil, err + } + if infoCacheEnable && len(filename) > 0 { + infoCache[filename] = &info + } + return &info, nil +} + +// ReadInfoForRef reads a file and return the fragment needed to resolve a $ref. 
+func ReadInfoForRef(basefile string, ref string) (*yaml.Node, error) { + fileCacheMutex.Lock() + defer fileCacheMutex.Unlock() + infoCacheMutex.Lock() + defer infoCacheMutex.Unlock() + initializeInfoCache() + if infoCacheEnable { + info, ok := infoCache[ref] + if ok { + if verboseReader { + log.Printf("Cache hit for ref %s#%s", basefile, ref) + } + return info, nil + } + if verboseReader { + log.Printf("Reading info for ref %s#%s", basefile, ref) + } + } + basedir, _ := filepath.Split(basefile) + parts := strings.Split(ref, "#") + var filename string + if parts[0] != "" { + filename = parts[0] + if _, err := url.ParseRequestURI(parts[0]); err != nil { + // It is not an URL, so the file is local + filename = basedir + parts[0] + } + } else { + filename = basefile + } + bytes, err := readBytesForFile(filename) + if err != nil { + return nil, err + } + info, err := readInfoFromBytes(filename, bytes) + if info != nil && info.Kind == yaml.DocumentNode { + info = info.Content[0] + } + if err != nil { + log.Printf("File error: %v\n", err) + } else { + if info == nil { + return nil, NewError(nil, fmt.Sprintf("could not resolve %s", ref)) + } + if len(parts) > 1 { + path := strings.Split(parts[1], "/") + for i, key := range path { + if i > 0 { + m := info + if true { + found := false + for i := 0; i < len(m.Content); i += 2 { + if m.Content[i].Value == key { + info = m.Content[i+1] + found = true + } + } + if !found { + infoCache[ref] = nil + return nil, NewError(nil, fmt.Sprintf("could not resolve %s", ref)) + } + } + } + } + } + } + if infoCacheEnable { + infoCache[ref] = info + } + return info, nil +} diff --git a/vendor/github.com/google/gnostic-models/extensions/README.md b/vendor/github.com/google/gnostic-models/extensions/README.md new file mode 100644 index 00000000..4b5d63e5 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/extensions/README.md @@ -0,0 +1,13 @@ +# Extensions + +**Extension Support is experimental.** + +This directory contains support code for building Gnostic extensio handlers and +associated examples. + +Extension handlers can be used to compile vendor or specification extensions +into protocol buffer structures. + +Like plugins, extension handlers are built as separate executables. Extension +bodies are written to extension handlers as serialized +ExtensionHandlerRequests. diff --git a/vendor/github.com/google/gnostic-models/extensions/extension.pb.go b/vendor/github.com/google/gnostic-models/extensions/extension.pb.go new file mode 100644 index 00000000..a71df8ab --- /dev/null +++ b/vendor/github.com/google/gnostic-models/extensions/extension.pb.go @@ -0,0 +1,461 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.27.1 +// protoc v3.19.3 +// source: extensions/extension.proto + +package gnostic_extension_v1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + anypb "google.golang.org/protobuf/types/known/anypb" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// The version number of Gnostic. +type Version struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Major int32 `protobuf:"varint,1,opt,name=major,proto3" json:"major,omitempty"` + Minor int32 `protobuf:"varint,2,opt,name=minor,proto3" json:"minor,omitempty"` + Patch int32 `protobuf:"varint,3,opt,name=patch,proto3" json:"patch,omitempty"` + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + Suffix string `protobuf:"bytes,4,opt,name=suffix,proto3" json:"suffix,omitempty"` +} + +func (x *Version) Reset() { + *x = Version{} + if protoimpl.UnsafeEnabled { + mi := &file_extensions_extension_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Version) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Version) ProtoMessage() {} + +func (x *Version) ProtoReflect() protoreflect.Message { + mi := &file_extensions_extension_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Version.ProtoReflect.Descriptor instead. +func (*Version) Descriptor() ([]byte, []int) { + return file_extensions_extension_proto_rawDescGZIP(), []int{0} +} + +func (x *Version) GetMajor() int32 { + if x != nil { + return x.Major + } + return 0 +} + +func (x *Version) GetMinor() int32 { + if x != nil { + return x.Minor + } + return 0 +} + +func (x *Version) GetPatch() int32 { + if x != nil { + return x.Patch + } + return 0 +} + +func (x *Version) GetSuffix() string { + if x != nil { + return x.Suffix + } + return "" +} + +// An encoded Request is written to the ExtensionHandler's stdin. +type ExtensionHandlerRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The extension to process. + Wrapper *Wrapper `protobuf:"bytes,1,opt,name=wrapper,proto3" json:"wrapper,omitempty"` + // The version number of Gnostic. 
+ CompilerVersion *Version `protobuf:"bytes,2,opt,name=compiler_version,json=compilerVersion,proto3" json:"compiler_version,omitempty"` +} + +func (x *ExtensionHandlerRequest) Reset() { + *x = ExtensionHandlerRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_extensions_extension_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExtensionHandlerRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExtensionHandlerRequest) ProtoMessage() {} + +func (x *ExtensionHandlerRequest) ProtoReflect() protoreflect.Message { + mi := &file_extensions_extension_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExtensionHandlerRequest.ProtoReflect.Descriptor instead. +func (*ExtensionHandlerRequest) Descriptor() ([]byte, []int) { + return file_extensions_extension_proto_rawDescGZIP(), []int{1} +} + +func (x *ExtensionHandlerRequest) GetWrapper() *Wrapper { + if x != nil { + return x.Wrapper + } + return nil +} + +func (x *ExtensionHandlerRequest) GetCompilerVersion() *Version { + if x != nil { + return x.CompilerVersion + } + return nil +} + +// The extensions writes an encoded ExtensionHandlerResponse to stdout. +type ExtensionHandlerResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // true if the extension is handled by the extension handler; false otherwise + Handled bool `protobuf:"varint,1,opt,name=handled,proto3" json:"handled,omitempty"` + // Error message(s). If non-empty, the extension handling failed. + // The extension handler process should exit with status code zero + // even if it reports an error in this way. + // + // This should be used to indicate errors which prevent the extension from + // operating as intended. Errors which indicate a problem in gnostic + // itself -- such as the input Document being unparseable -- should be + // reported by writing a message to stderr and exiting with a non-zero + // status code. + Errors []string `protobuf:"bytes,2,rep,name=errors,proto3" json:"errors,omitempty"` + // text output + Value *anypb.Any `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *ExtensionHandlerResponse) Reset() { + *x = ExtensionHandlerResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_extensions_extension_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExtensionHandlerResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExtensionHandlerResponse) ProtoMessage() {} + +func (x *ExtensionHandlerResponse) ProtoReflect() protoreflect.Message { + mi := &file_extensions_extension_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExtensionHandlerResponse.ProtoReflect.Descriptor instead. 
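Version, ExtensionHandlerRequest, and ExtensionHandlerResponse define the stdin/stdout protocol between gnostic and an extension handler. A rough sketch of a standalone handler follows; it is not part of this patch, assumes the Main helper from extensions.go added later in the same vendored package, and uses an illustrative extension name.

package main

import (
	"github.com/golang/protobuf/proto"

	ext "github.com/google/gnostic-models/extensions"
)

func main() {
	// Main reads a serialized ExtensionHandlerRequest from stdin, invokes the
	// handler, and writes a serialized ExtensionHandlerResponse to stdout.
	ext.Main(func(name string, yamlInput string) (bool, proto.Message, error) {
		switch name {
		case "x-sample": // illustrative extension name
			// A real handler would unmarshal yamlInput into its own generated
			// message type; a Version message is used here only as a placeholder.
			return true, &ext.Version{Major: 1}, nil
		default:
			// Report the extension as unhandled.
			return false, nil, nil
		}
	})
}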
+func (*ExtensionHandlerResponse) Descriptor() ([]byte, []int) { + return file_extensions_extension_proto_rawDescGZIP(), []int{2} +} + +func (x *ExtensionHandlerResponse) GetHandled() bool { + if x != nil { + return x.Handled + } + return false +} + +func (x *ExtensionHandlerResponse) GetErrors() []string { + if x != nil { + return x.Errors + } + return nil +} + +func (x *ExtensionHandlerResponse) GetValue() *anypb.Any { + if x != nil { + return x.Value + } + return nil +} + +type Wrapper struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // version of the OpenAPI specification in which this extension was written. + Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + // Name of the extension. + ExtensionName string `protobuf:"bytes,2,opt,name=extension_name,json=extensionName,proto3" json:"extension_name,omitempty"` + // YAML-formatted extension value. + Yaml string `protobuf:"bytes,3,opt,name=yaml,proto3" json:"yaml,omitempty"` +} + +func (x *Wrapper) Reset() { + *x = Wrapper{} + if protoimpl.UnsafeEnabled { + mi := &file_extensions_extension_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Wrapper) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Wrapper) ProtoMessage() {} + +func (x *Wrapper) ProtoReflect() protoreflect.Message { + mi := &file_extensions_extension_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Wrapper.ProtoReflect.Descriptor instead. +func (*Wrapper) Descriptor() ([]byte, []int) { + return file_extensions_extension_proto_rawDescGZIP(), []int{3} +} + +func (x *Wrapper) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *Wrapper) GetExtensionName() string { + if x != nil { + return x.ExtensionName + } + return "" +} + +func (x *Wrapper) GetYaml() string { + if x != nil { + return x.Yaml + } + return "" +} + +var File_extensions_extension_proto protoreflect.FileDescriptor + +var file_extensions_extension_proto_rawDesc = []byte{ + 0x0a, 0x1a, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x65, 0x78, 0x74, + 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x14, 0x67, 0x6e, + 0x6f, 0x73, 0x74, 0x69, 0x63, 0x2e, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x31, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x63, 0x0a, + 0x07, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6d, 0x61, 0x6a, 0x6f, + 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6d, 0x61, 0x6a, 0x6f, 0x72, 0x12, 0x14, + 0x0a, 0x05, 0x6d, 0x69, 0x6e, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6d, + 0x69, 0x6e, 0x6f, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x61, 0x74, 0x63, 0x68, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x05, 0x52, 0x05, 0x70, 0x61, 0x74, 0x63, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x75, + 0x66, 0x66, 0x69, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x75, 0x66, 0x66, + 0x69, 0x78, 0x22, 0x9c, 0x01, 0x0a, 0x17, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, + 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 
0x65, 0x73, 0x74, 0x12, 0x37, + 0x0a, 0x07, 0x77, 0x72, 0x61, 0x70, 0x70, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1d, 0x2e, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x2e, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x57, 0x72, 0x61, 0x70, 0x70, 0x65, 0x72, 0x52, 0x07, + 0x77, 0x72, 0x61, 0x70, 0x70, 0x65, 0x72, 0x12, 0x48, 0x0a, 0x10, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1d, 0x2e, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x2e, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x52, 0x0f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x22, 0x78, 0x0a, 0x18, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x61, + 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, + 0x07, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, + 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x12, + 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x5e, 0x0a, 0x07, 0x57, + 0x72, 0x61, 0x70, 0x70, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x12, 0x25, 0x0a, 0x0e, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x79, 0x61, 0x6d, 0x6c, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x79, 0x61, 0x6d, 0x6c, 0x42, 0x4d, 0x0a, 0x0e, 0x6f, + 0x72, 0x67, 0x2e, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x2e, 0x76, 0x31, 0x42, 0x10, 0x47, + 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x50, + 0x01, 0x5a, 0x21, 0x2e, 0x2f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x3b, + 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, + 0x6e, 0x5f, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x47, 0x4e, 0x58, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, +} + +var ( + file_extensions_extension_proto_rawDescOnce sync.Once + file_extensions_extension_proto_rawDescData = file_extensions_extension_proto_rawDesc +) + +func file_extensions_extension_proto_rawDescGZIP() []byte { + file_extensions_extension_proto_rawDescOnce.Do(func() { + file_extensions_extension_proto_rawDescData = protoimpl.X.CompressGZIP(file_extensions_extension_proto_rawDescData) + }) + return file_extensions_extension_proto_rawDescData +} + +var file_extensions_extension_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_extensions_extension_proto_goTypes = []interface{}{ + (*Version)(nil), // 0: gnostic.extension.v1.Version + (*ExtensionHandlerRequest)(nil), // 1: gnostic.extension.v1.ExtensionHandlerRequest + (*ExtensionHandlerResponse)(nil), // 2: gnostic.extension.v1.ExtensionHandlerResponse + (*Wrapper)(nil), // 3: 
gnostic.extension.v1.Wrapper + (*anypb.Any)(nil), // 4: google.protobuf.Any +} +var file_extensions_extension_proto_depIdxs = []int32{ + 3, // 0: gnostic.extension.v1.ExtensionHandlerRequest.wrapper:type_name -> gnostic.extension.v1.Wrapper + 0, // 1: gnostic.extension.v1.ExtensionHandlerRequest.compiler_version:type_name -> gnostic.extension.v1.Version + 4, // 2: gnostic.extension.v1.ExtensionHandlerResponse.value:type_name -> google.protobuf.Any + 3, // [3:3] is the sub-list for method output_type + 3, // [3:3] is the sub-list for method input_type + 3, // [3:3] is the sub-list for extension type_name + 3, // [3:3] is the sub-list for extension extendee + 0, // [0:3] is the sub-list for field type_name +} + +func init() { file_extensions_extension_proto_init() } +func file_extensions_extension_proto_init() { + if File_extensions_extension_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_extensions_extension_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Version); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_extensions_extension_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExtensionHandlerRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_extensions_extension_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExtensionHandlerResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_extensions_extension_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Wrapper); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_extensions_extension_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_extensions_extension_proto_goTypes, + DependencyIndexes: file_extensions_extension_proto_depIdxs, + MessageInfos: file_extensions_extension_proto_msgTypes, + }.Build() + File_extensions_extension_proto = out.File + file_extensions_extension_proto_rawDesc = nil + file_extensions_extension_proto_goTypes = nil + file_extensions_extension_proto_depIdxs = nil +} diff --git a/vendor/github.com/google/gnostic-models/extensions/extension.proto b/vendor/github.com/google/gnostic-models/extensions/extension.proto new file mode 100644 index 00000000..875137c1 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/extensions/extension.proto @@ -0,0 +1,97 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package gnostic.extension.v1; + +import "google/protobuf/any.proto"; + +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; + +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "GnosticExtension"; + +// The Java package name must be proto package name with proper prefix. +option java_package = "org.gnostic.v1"; + +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +// +// "Gnostic Extension" +option objc_class_prefix = "GNX"; + +// The Go package name. +option go_package = "./extensions;gnostic_extension_v1"; + +// The version number of Gnostic. +message Version { + int32 major = 1; + int32 minor = 2; + int32 patch = 3; + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + string suffix = 4; +} + +// An encoded Request is written to the ExtensionHandler's stdin. +message ExtensionHandlerRequest { + + // The extension to process. + Wrapper wrapper = 1; + + // The version number of Gnostic. + Version compiler_version = 2; +} + +// The extensions writes an encoded ExtensionHandlerResponse to stdout. +message ExtensionHandlerResponse { + + // true if the extension is handled by the extension handler; false otherwise + bool handled = 1; + + // Error message(s). If non-empty, the extension handling failed. + // The extension handler process should exit with status code zero + // even if it reports an error in this way. + // + // This should be used to indicate errors which prevent the extension from + // operating as intended. Errors which indicate a problem in gnostic + // itself -- such as the input Document being unparseable -- should be + // reported by writing a message to stderr and exiting with a non-zero + // status code. + repeated string errors = 2; + + // text output + google.protobuf.Any value = 3; +} + +message Wrapper { + // version of the OpenAPI specification in which this extension was written. + string version = 1; + + // Name of the extension. + string extension_name = 2; + + // YAML-formatted extension value. + string yaml = 3; +} diff --git a/vendor/github.com/google/gnostic-models/extensions/extensions.go b/vendor/github.com/google/gnostic-models/extensions/extensions.go new file mode 100644 index 00000000..ec8afd00 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/extensions/extensions.go @@ -0,0 +1,64 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gnostic_extension_v1 + +import ( + "io/ioutil" + "log" + "os" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes" +) + +type extensionHandler func(name string, yamlInput string) (bool, proto.Message, error) + +// Main implements the main program of an extension handler. +func Main(handler extensionHandler) { + // unpack the request + data, err := ioutil.ReadAll(os.Stdin) + if err != nil { + log.Println("File error:", err.Error()) + os.Exit(1) + } + if len(data) == 0 { + log.Println("No input data.") + os.Exit(1) + } + request := &ExtensionHandlerRequest{} + err = proto.Unmarshal(data, request) + if err != nil { + log.Println("Input error:", err.Error()) + os.Exit(1) + } + // call the handler + handled, output, err := handler(request.Wrapper.ExtensionName, request.Wrapper.Yaml) + // respond with the output of the handler + response := &ExtensionHandlerResponse{ + Handled: false, // default assumption + Errors: make([]string, 0), + } + if err != nil { + response.Errors = append(response.Errors, err.Error()) + } else if handled { + response.Handled = true + response.Value, err = ptypes.MarshalAny(output) + if err != nil { + response.Errors = append(response.Errors, err.Error()) + } + } + responseBytes, _ := proto.Marshal(response) + os.Stdout.Write(responseBytes) +} diff --git a/vendor/github.com/google/gnostic-models/jsonschema/README.md b/vendor/github.com/google/gnostic-models/jsonschema/README.md new file mode 100644 index 00000000..6793c517 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/jsonschema/README.md @@ -0,0 +1,4 @@ +# jsonschema + +This directory contains code for reading, writing, and manipulating JSON +schemas. diff --git a/vendor/github.com/google/gnostic-models/jsonschema/base.go b/vendor/github.com/google/gnostic-models/jsonschema/base.go new file mode 100644 index 00000000..5fcc4885 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/jsonschema/base.go @@ -0,0 +1,97 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. 
+ +package jsonschema + +import ( + "encoding/base64" +) + +func baseSchemaBytes() ([]byte, error){ + return base64.StdEncoding.DecodeString( +`ewogICAgImlkIjogImh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDQvc2NoZW1hIyIsCiAgICAi +JHNjaGVtYSI6ICJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA0L3NjaGVtYSMiLAogICAgImRl +c2NyaXB0aW9uIjogIkNvcmUgc2NoZW1hIG1ldGEtc2NoZW1hIiwKICAgICJkZWZpbml0aW9ucyI6IHsK +ICAgICAgICAic2NoZW1hQXJyYXkiOiB7CiAgICAgICAgICAgICJ0eXBlIjogImFycmF5IiwKICAgICAg +ICAgICAgIm1pbkl0ZW1zIjogMSwKICAgICAgICAgICAgIml0ZW1zIjogeyAiJHJlZiI6ICIjIiB9CiAg +ICAgICAgfSwKICAgICAgICAicG9zaXRpdmVJbnRlZ2VyIjogewogICAgICAgICAgICAidHlwZSI6ICJp +bnRlZ2VyIiwKICAgICAgICAgICAgIm1pbmltdW0iOiAwCiAgICAgICAgfSwKICAgICAgICAicG9zaXRp +dmVJbnRlZ2VyRGVmYXVsdDAiOiB7CiAgICAgICAgICAgICJhbGxPZiI6IFsgeyAiJHJlZiI6ICIjL2Rl +ZmluaXRpb25zL3Bvc2l0aXZlSW50ZWdlciIgfSwgeyAiZGVmYXVsdCI6IDAgfSBdCiAgICAgICAgfSwK +ICAgICAgICAic2ltcGxlVHlwZXMiOiB7CiAgICAgICAgICAgICJlbnVtIjogWyAiYXJyYXkiLCAiYm9v +bGVhbiIsICJpbnRlZ2VyIiwgIm51bGwiLCAibnVtYmVyIiwgIm9iamVjdCIsICJzdHJpbmciIF0KICAg +ICAgICB9LAogICAgICAgICJzdHJpbmdBcnJheSI6IHsKICAgICAgICAgICAgInR5cGUiOiAiYXJyYXki +LAogICAgICAgICAgICAiaXRlbXMiOiB7ICJ0eXBlIjogInN0cmluZyIgfSwKICAgICAgICAgICAgIm1p +bkl0ZW1zIjogMSwKICAgICAgICAgICAgInVuaXF1ZUl0ZW1zIjogdHJ1ZQogICAgICAgIH0KICAgIH0s +CiAgICAidHlwZSI6ICJvYmplY3QiLAogICAgInByb3BlcnRpZXMiOiB7CiAgICAgICAgImlkIjogewog +ICAgICAgICAgICAidHlwZSI6ICJzdHJpbmciLAogICAgICAgICAgICAiZm9ybWF0IjogInVyaSIKICAg +ICAgICB9LAogICAgICAgICIkc2NoZW1hIjogewogICAgICAgICAgICAidHlwZSI6ICJzdHJpbmciLAog +ICAgICAgICAgICAiZm9ybWF0IjogInVyaSIKICAgICAgICB9LAogICAgICAgICJ0aXRsZSI6IHsKICAg +ICAgICAgICAgInR5cGUiOiAic3RyaW5nIgogICAgICAgIH0sCiAgICAgICAgImRlc2NyaXB0aW9uIjog +ewogICAgICAgICAgICAidHlwZSI6ICJzdHJpbmciCiAgICAgICAgfSwKICAgICAgICAiZGVmYXVsdCI6 +IHt9LAogICAgICAgICJtdWx0aXBsZU9mIjogewogICAgICAgICAgICAidHlwZSI6ICJudW1iZXIiLAog +ICAgICAgICAgICAibWluaW11bSI6IDAsCiAgICAgICAgICAgICJleGNsdXNpdmVNaW5pbXVtIjogdHJ1 +ZQogICAgICAgIH0sCiAgICAgICAgIm1heGltdW0iOiB7CiAgICAgICAgICAgICJ0eXBlIjogIm51bWJl +ciIKICAgICAgICB9LAogICAgICAgICJleGNsdXNpdmVNYXhpbXVtIjogewogICAgICAgICAgICAidHlw +ZSI6ICJib29sZWFuIiwKICAgICAgICAgICAgImRlZmF1bHQiOiBmYWxzZQogICAgICAgIH0sCiAgICAg +ICAgIm1pbmltdW0iOiB7CiAgICAgICAgICAgICJ0eXBlIjogIm51bWJlciIKICAgICAgICB9LAogICAg +ICAgICJleGNsdXNpdmVNaW5pbXVtIjogewogICAgICAgICAgICAidHlwZSI6ICJib29sZWFuIiwKICAg +ICAgICAgICAgImRlZmF1bHQiOiBmYWxzZQogICAgICAgIH0sCiAgICAgICAgIm1heExlbmd0aCI6IHsg +IiRyZWYiOiAiIy9kZWZpbml0aW9ucy9wb3NpdGl2ZUludGVnZXIiIH0sCiAgICAgICAgIm1pbkxlbmd0 +aCI6IHsgIiRyZWYiOiAiIy9kZWZpbml0aW9ucy9wb3NpdGl2ZUludGVnZXJEZWZhdWx0MCIgfSwKICAg +ICAgICAicGF0dGVybiI6IHsKICAgICAgICAgICAgInR5cGUiOiAic3RyaW5nIiwKICAgICAgICAgICAg +ImZvcm1hdCI6ICJyZWdleCIKICAgICAgICB9LAogICAgICAgICJhZGRpdGlvbmFsSXRlbXMiOiB7CiAg +ICAgICAgICAgICJhbnlPZiI6IFsKICAgICAgICAgICAgICAgIHsgInR5cGUiOiAiYm9vbGVhbiIgfSwK +ICAgICAgICAgICAgICAgIHsgIiRyZWYiOiAiIyIgfQogICAgICAgICAgICBdLAogICAgICAgICAgICAi +ZGVmYXVsdCI6IHt9CiAgICAgICAgfSwKICAgICAgICAiaXRlbXMiOiB7CiAgICAgICAgICAgICJhbnlP +ZiI6IFsKICAgICAgICAgICAgICAgIHsgIiRyZWYiOiAiIyIgfSwKICAgICAgICAgICAgICAgIHsgIiRy +ZWYiOiAiIy9kZWZpbml0aW9ucy9zY2hlbWFBcnJheSIgfQogICAgICAgICAgICBdLAogICAgICAgICAg +ICAiZGVmYXVsdCI6IHt9CiAgICAgICAgfSwKICAgICAgICAibWF4SXRlbXMiOiB7ICIkcmVmIjogIiMv +ZGVmaW5pdGlvbnMvcG9zaXRpdmVJbnRlZ2VyIiB9LAogICAgICAgICJtaW5JdGVtcyI6IHsgIiRyZWYi +OiAiIy9kZWZpbml0aW9ucy9wb3NpdGl2ZUludGVnZXJEZWZhdWx0MCIgfSwKICAgICAgICAidW5pcXVl +SXRlbXMiOiB7CiAgICAgICAgICAgICJ0eXBlIjogImJvb2xlYW4iLAogICAgICAgICAgICAiZGVmYXVs 
+dCI6IGZhbHNlCiAgICAgICAgfSwKICAgICAgICAibWF4UHJvcGVydGllcyI6IHsgIiRyZWYiOiAiIy9k +ZWZpbml0aW9ucy9wb3NpdGl2ZUludGVnZXIiIH0sCiAgICAgICAgIm1pblByb3BlcnRpZXMiOiB7ICIk +cmVmIjogIiMvZGVmaW5pdGlvbnMvcG9zaXRpdmVJbnRlZ2VyRGVmYXVsdDAiIH0sCiAgICAgICAgInJl +cXVpcmVkIjogeyAiJHJlZiI6ICIjL2RlZmluaXRpb25zL3N0cmluZ0FycmF5IiB9LAogICAgICAgICJh +ZGRpdGlvbmFsUHJvcGVydGllcyI6IHsKICAgICAgICAgICAgImFueU9mIjogWwogICAgICAgICAgICAg +ICAgeyAidHlwZSI6ICJib29sZWFuIiB9LAogICAgICAgICAgICAgICAgeyAiJHJlZiI6ICIjIiB9CiAg +ICAgICAgICAgIF0sCiAgICAgICAgICAgICJkZWZhdWx0Ijoge30KICAgICAgICB9LAogICAgICAgICJk +ZWZpbml0aW9ucyI6IHsKICAgICAgICAgICAgInR5cGUiOiAib2JqZWN0IiwKICAgICAgICAgICAgImFk +ZGl0aW9uYWxQcm9wZXJ0aWVzIjogeyAiJHJlZiI6ICIjIiB9LAogICAgICAgICAgICAiZGVmYXVsdCI6 +IHt9CiAgICAgICAgfSwKICAgICAgICAicHJvcGVydGllcyI6IHsKICAgICAgICAgICAgInR5cGUiOiAi +b2JqZWN0IiwKICAgICAgICAgICAgImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjogeyAiJHJlZiI6ICIjIiB9 +LAogICAgICAgICAgICAiZGVmYXVsdCI6IHt9CiAgICAgICAgfSwKICAgICAgICAicGF0dGVyblByb3Bl +cnRpZXMiOiB7CiAgICAgICAgICAgICJ0eXBlIjogIm9iamVjdCIsCiAgICAgICAgICAgICJhZGRpdGlv +bmFsUHJvcGVydGllcyI6IHsgIiRyZWYiOiAiIyIgfSwKICAgICAgICAgICAgImRlZmF1bHQiOiB7fQog +ICAgICAgIH0sCiAgICAgICAgImRlcGVuZGVuY2llcyI6IHsKICAgICAgICAgICAgInR5cGUiOiAib2Jq +ZWN0IiwKICAgICAgICAgICAgImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjogewogICAgICAgICAgICAgICAg +ImFueU9mIjogWwogICAgICAgICAgICAgICAgICAgIHsgIiRyZWYiOiAiIyIgfSwKICAgICAgICAgICAg +ICAgICAgICB7ICIkcmVmIjogIiMvZGVmaW5pdGlvbnMvc3RyaW5nQXJyYXkiIH0KICAgICAgICAgICAg +ICAgIF0KICAgICAgICAgICAgfQogICAgICAgIH0sCiAgICAgICAgImVudW0iOiB7CiAgICAgICAgICAg +ICJ0eXBlIjogImFycmF5IiwKICAgICAgICAgICAgIm1pbkl0ZW1zIjogMSwKICAgICAgICAgICAgInVu +aXF1ZUl0ZW1zIjogdHJ1ZQogICAgICAgIH0sCiAgICAgICAgInR5cGUiOiB7CiAgICAgICAgICAgICJh +bnlPZiI6IFsKICAgICAgICAgICAgICAgIHsgIiRyZWYiOiAiIy9kZWZpbml0aW9ucy9zaW1wbGVUeXBl +cyIgfSwKICAgICAgICAgICAgICAgIHsKICAgICAgICAgICAgICAgICAgICAidHlwZSI6ICJhcnJheSIs +CiAgICAgICAgICAgICAgICAgICAgIml0ZW1zIjogeyAiJHJlZiI6ICIjL2RlZmluaXRpb25zL3NpbXBs +ZVR5cGVzIiB9LAogICAgICAgICAgICAgICAgICAgICJtaW5JdGVtcyI6IDEsCiAgICAgICAgICAgICAg +ICAgICAgInVuaXF1ZUl0ZW1zIjogdHJ1ZQogICAgICAgICAgICAgICAgfQogICAgICAgICAgICBdCiAg +ICAgICAgfSwKICAgICAgICAiYWxsT2YiOiB7ICIkcmVmIjogIiMvZGVmaW5pdGlvbnMvc2NoZW1hQXJy +YXkiIH0sCiAgICAgICAgImFueU9mIjogeyAiJHJlZiI6ICIjL2RlZmluaXRpb25zL3NjaGVtYUFycmF5 +IiB9LAogICAgICAgICJvbmVPZiI6IHsgIiRyZWYiOiAiIy9kZWZpbml0aW9ucy9zY2hlbWFBcnJheSIg +fSwKICAgICAgICAibm90IjogeyAiJHJlZiI6ICIjIiB9CiAgICB9LAogICAgImRlcGVuZGVuY2llcyI6 +IHsKICAgICAgICAiZXhjbHVzaXZlTWF4aW11bSI6IFsgIm1heGltdW0iIF0sCiAgICAgICAgImV4Y2x1 +c2l2ZU1pbmltdW0iOiBbICJtaW5pbXVtIiBdCiAgICB9LAogICAgImRlZmF1bHQiOiB7fQp9Cg==`)} diff --git a/vendor/github.com/google/gnostic-models/jsonschema/display.go b/vendor/github.com/google/gnostic-models/jsonschema/display.go new file mode 100644 index 00000000..028a760a --- /dev/null +++ b/vendor/github.com/google/gnostic-models/jsonschema/display.go @@ -0,0 +1,229 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package jsonschema + +import ( + "fmt" + "strings" +) + +// +// DISPLAY +// The following methods display Schemas. +// + +// Description returns a string representation of a string or string array. +func (s *StringOrStringArray) Description() string { + if s.String != nil { + return *s.String + } + if s.StringArray != nil { + return strings.Join(*s.StringArray, ", ") + } + return "" +} + +// Returns a string representation of a Schema. +func (schema *Schema) String() string { + return schema.describeSchema("") +} + +// Helper: Returns a string representation of a Schema indented by a specified string. +func (schema *Schema) describeSchema(indent string) string { + result := "" + if schema.Schema != nil { + result += indent + "$schema: " + *(schema.Schema) + "\n" + } + if schema.ID != nil { + result += indent + "id: " + *(schema.ID) + "\n" + } + if schema.MultipleOf != nil { + result += indent + fmt.Sprintf("multipleOf: %+v\n", *(schema.MultipleOf)) + } + if schema.Maximum != nil { + result += indent + fmt.Sprintf("maximum: %+v\n", *(schema.Maximum)) + } + if schema.ExclusiveMaximum != nil { + result += indent + fmt.Sprintf("exclusiveMaximum: %+v\n", *(schema.ExclusiveMaximum)) + } + if schema.Minimum != nil { + result += indent + fmt.Sprintf("minimum: %+v\n", *(schema.Minimum)) + } + if schema.ExclusiveMinimum != nil { + result += indent + fmt.Sprintf("exclusiveMinimum: %+v\n", *(schema.ExclusiveMinimum)) + } + if schema.MaxLength != nil { + result += indent + fmt.Sprintf("maxLength: %+v\n", *(schema.MaxLength)) + } + if schema.MinLength != nil { + result += indent + fmt.Sprintf("minLength: %+v\n", *(schema.MinLength)) + } + if schema.Pattern != nil { + result += indent + fmt.Sprintf("pattern: %+v\n", *(schema.Pattern)) + } + if schema.AdditionalItems != nil { + s := schema.AdditionalItems.Schema + if s != nil { + result += indent + "additionalItems:\n" + result += s.describeSchema(indent + " ") + } else { + b := *(schema.AdditionalItems.Boolean) + result += indent + fmt.Sprintf("additionalItems: %+v\n", b) + } + } + if schema.Items != nil { + result += indent + "items:\n" + items := schema.Items + if items.SchemaArray != nil { + for i, s := range *(items.SchemaArray) { + result += indent + " " + fmt.Sprintf("%d", i) + ":\n" + result += s.describeSchema(indent + " " + " ") + } + } else if items.Schema != nil { + result += items.Schema.describeSchema(indent + " " + " ") + } + } + if schema.MaxItems != nil { + result += indent + fmt.Sprintf("maxItems: %+v\n", *(schema.MaxItems)) + } + if schema.MinItems != nil { + result += indent + fmt.Sprintf("minItems: %+v\n", *(schema.MinItems)) + } + if schema.UniqueItems != nil { + result += indent + fmt.Sprintf("uniqueItems: %+v\n", *(schema.UniqueItems)) + } + if schema.MaxProperties != nil { + result += indent + fmt.Sprintf("maxProperties: %+v\n", *(schema.MaxProperties)) + } + if schema.MinProperties != nil { + result += indent + fmt.Sprintf("minProperties: %+v\n", *(schema.MinProperties)) + } + if schema.Required != nil { + result += indent + fmt.Sprintf("required: %+v\n", *(schema.Required)) + } + if schema.AdditionalProperties != nil { + s := schema.AdditionalProperties.Schema + if s != nil { + result += indent + "additionalProperties:\n" + result += s.describeSchema(indent + " ") + } else { + b := *(schema.AdditionalProperties.Boolean) + result += indent + fmt.Sprintf("additionalProperties: %+v\n", b) + } + } + if schema.Properties != nil { + result += indent + "properties:\n" + for _, pair := range *(schema.Properties) { + name := pair.Name 
+ s := pair.Value + result += indent + " " + name + ":\n" + result += s.describeSchema(indent + " " + " ") + } + } + if schema.PatternProperties != nil { + result += indent + "patternProperties:\n" + for _, pair := range *(schema.PatternProperties) { + name := pair.Name + s := pair.Value + result += indent + " " + name + ":\n" + result += s.describeSchema(indent + " " + " ") + } + } + if schema.Dependencies != nil { + result += indent + "dependencies:\n" + for _, pair := range *(schema.Dependencies) { + name := pair.Name + schemaOrStringArray := pair.Value + s := schemaOrStringArray.Schema + if s != nil { + result += indent + " " + name + ":\n" + result += s.describeSchema(indent + " " + " ") + } else { + a := schemaOrStringArray.StringArray + if a != nil { + result += indent + " " + name + ":\n" + for _, s2 := range *a { + result += indent + " " + " " + s2 + "\n" + } + } + } + + } + } + if schema.Enumeration != nil { + result += indent + "enumeration:\n" + for _, value := range *(schema.Enumeration) { + if value.String != nil { + result += indent + " " + fmt.Sprintf("%+v\n", *value.String) + } else { + result += indent + " " + fmt.Sprintf("%+v\n", *value.Bool) + } + } + } + if schema.Type != nil { + result += indent + fmt.Sprintf("type: %+v\n", schema.Type.Description()) + } + if schema.AllOf != nil { + result += indent + "allOf:\n" + for _, s := range *(schema.AllOf) { + result += s.describeSchema(indent + " ") + result += indent + "-\n" + } + } + if schema.AnyOf != nil { + result += indent + "anyOf:\n" + for _, s := range *(schema.AnyOf) { + result += s.describeSchema(indent + " ") + result += indent + "-\n" + } + } + if schema.OneOf != nil { + result += indent + "oneOf:\n" + for _, s := range *(schema.OneOf) { + result += s.describeSchema(indent + " ") + result += indent + "-\n" + } + } + if schema.Not != nil { + result += indent + "not:\n" + result += schema.Not.describeSchema(indent + " ") + } + if schema.Definitions != nil { + result += indent + "definitions:\n" + for _, pair := range *(schema.Definitions) { + name := pair.Name + s := pair.Value + result += indent + " " + name + ":\n" + result += s.describeSchema(indent + " " + " ") + } + } + if schema.Title != nil { + result += indent + "title: " + *(schema.Title) + "\n" + } + if schema.Description != nil { + result += indent + "description: " + *(schema.Description) + "\n" + } + if schema.Default != nil { + result += indent + "default:\n" + result += indent + fmt.Sprintf(" %+v\n", *(schema.Default)) + } + if schema.Format != nil { + result += indent + "format: " + *(schema.Format) + "\n" + } + if schema.Ref != nil { + result += indent + "$ref: " + *(schema.Ref) + "\n" + } + return result +} diff --git a/vendor/github.com/google/gnostic-models/jsonschema/models.go b/vendor/github.com/google/gnostic-models/jsonschema/models.go new file mode 100644 index 00000000..4781bdc5 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/jsonschema/models.go @@ -0,0 +1,228 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Package jsonschema supports the reading, writing, and manipulation +// of JSON Schemas. +package jsonschema + +import "gopkg.in/yaml.v3" + +// The Schema struct models a JSON Schema and, because schemas are +// defined hierarchically, contains many references to itself. +// All fields are pointers and are nil if the associated values +// are not specified. +type Schema struct { + Schema *string // $schema + ID *string // id keyword used for $ref resolution scope + Ref *string // $ref, i.e. JSON Pointers + + // http://json-schema.org/latest/json-schema-validation.html + // 5.1. Validation keywords for numeric instances (number and integer) + MultipleOf *SchemaNumber + Maximum *SchemaNumber + ExclusiveMaximum *bool + Minimum *SchemaNumber + ExclusiveMinimum *bool + + // 5.2. Validation keywords for strings + MaxLength *int64 + MinLength *int64 + Pattern *string + + // 5.3. Validation keywords for arrays + AdditionalItems *SchemaOrBoolean + Items *SchemaOrSchemaArray + MaxItems *int64 + MinItems *int64 + UniqueItems *bool + + // 5.4. Validation keywords for objects + MaxProperties *int64 + MinProperties *int64 + Required *[]string + AdditionalProperties *SchemaOrBoolean + Properties *[]*NamedSchema + PatternProperties *[]*NamedSchema + Dependencies *[]*NamedSchemaOrStringArray + + // 5.5. Validation keywords for any instance type + Enumeration *[]SchemaEnumValue + Type *StringOrStringArray + AllOf *[]*Schema + AnyOf *[]*Schema + OneOf *[]*Schema + Not *Schema + Definitions *[]*NamedSchema + + // 6. Metadata keywords + Title *string + Description *string + Default *yaml.Node + + // 7. Semantic validation with "format" + Format *string +} + +// These helper structs represent "combination" types that generally can +// have values of one type or another. All are used to represent parts +// of Schemas. + +// SchemaNumber represents a value that can be either an Integer or a Float. +type SchemaNumber struct { + Integer *int64 + Float *float64 +} + +// NewSchemaNumberWithInteger creates and returns a new object +func NewSchemaNumberWithInteger(i int64) *SchemaNumber { + result := &SchemaNumber{} + result.Integer = &i + return result +} + +// NewSchemaNumberWithFloat creates and returns a new object +func NewSchemaNumberWithFloat(f float64) *SchemaNumber { + result := &SchemaNumber{} + result.Float = &f + return result +} + +// SchemaOrBoolean represents a value that can be either a Schema or a Boolean. +type SchemaOrBoolean struct { + Schema *Schema + Boolean *bool +} + +// NewSchemaOrBooleanWithSchema creates and returns a new object +func NewSchemaOrBooleanWithSchema(s *Schema) *SchemaOrBoolean { + result := &SchemaOrBoolean{} + result.Schema = s + return result +} + +// NewSchemaOrBooleanWithBoolean creates and returns a new object +func NewSchemaOrBooleanWithBoolean(b bool) *SchemaOrBoolean { + result := &SchemaOrBoolean{} + result.Boolean = &b + return result +} + +// StringOrStringArray represents a value that can be either +// a String or an Array of Strings. 
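Because every field of Schema is a pointer, an unset keyword stays nil rather than collapsing to a zero value, and the combination types carry "one of several shapes" values. The sketch below is purely illustrative (not part of the vendored file) and prints the result with the String method from display.go, added earlier in this patch.

package main

import (
	"fmt"

	"github.com/google/gnostic-models/jsonschema"
)

func main() {
	title := "bounded integer"
	// Unset keywords remain nil; only the listed fields are populated.
	schema := &jsonschema.Schema{
		Title:      &title,
		Minimum:    jsonschema.NewSchemaNumberWithInteger(0),
		Maximum:    jsonschema.NewSchemaNumberWithInteger(100),
		MultipleOf: jsonschema.NewSchemaNumberWithInteger(5),
	}
	// String() renders the schema using the display helpers in this package.
	fmt.Print(schema.String())
}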
+type StringOrStringArray struct { + String *string + StringArray *[]string +} + +// NewStringOrStringArrayWithString creates and returns a new object +func NewStringOrStringArrayWithString(s string) *StringOrStringArray { + result := &StringOrStringArray{} + result.String = &s + return result +} + +// NewStringOrStringArrayWithStringArray creates and returns a new object +func NewStringOrStringArrayWithStringArray(a []string) *StringOrStringArray { + result := &StringOrStringArray{} + result.StringArray = &a + return result +} + +// SchemaOrStringArray represents a value that can be either +// a Schema or an Array of Strings. +type SchemaOrStringArray struct { + Schema *Schema + StringArray *[]string +} + +// SchemaOrSchemaArray represents a value that can be either +// a Schema or an Array of Schemas. +type SchemaOrSchemaArray struct { + Schema *Schema + SchemaArray *[]*Schema +} + +// NewSchemaOrSchemaArrayWithSchema creates and returns a new object +func NewSchemaOrSchemaArrayWithSchema(s *Schema) *SchemaOrSchemaArray { + result := &SchemaOrSchemaArray{} + result.Schema = s + return result +} + +// NewSchemaOrSchemaArrayWithSchemaArray creates and returns a new object +func NewSchemaOrSchemaArrayWithSchemaArray(a []*Schema) *SchemaOrSchemaArray { + result := &SchemaOrSchemaArray{} + result.SchemaArray = &a + return result +} + +// SchemaEnumValue represents a value that can be part of an +// enumeration in a Schema. +type SchemaEnumValue struct { + String *string + Bool *bool +} + +// NamedSchema is a name-value pair that is used to emulate maps +// with ordered keys. +type NamedSchema struct { + Name string + Value *Schema +} + +// NewNamedSchema creates and returns a new object +func NewNamedSchema(name string, value *Schema) *NamedSchema { + return &NamedSchema{Name: name, Value: value} +} + +// NamedSchemaOrStringArray is a name-value pair that is used +// to emulate maps with ordered keys. +type NamedSchemaOrStringArray struct { + Name string + Value *SchemaOrStringArray +} + +// Access named subschemas by name + +func namedSchemaArrayElementWithName(array *[]*NamedSchema, name string) *Schema { + if array == nil { + return nil + } + for _, pair := range *array { + if pair.Name == name { + return pair.Value + } + } + return nil +} + +// PropertyWithName returns the selected element. +func (s *Schema) PropertyWithName(name string) *Schema { + return namedSchemaArrayElementWithName(s.Properties, name) +} + +// PatternPropertyWithName returns the selected element. +func (s *Schema) PatternPropertyWithName(name string) *Schema { + return namedSchemaArrayElementWithName(s.PatternProperties, name) +} + +// DefinitionWithName returns the selected element. +func (s *Schema) DefinitionWithName(name string) *Schema { + return namedSchemaArrayElementWithName(s.Definitions, name) +} + +// AddProperty adds a named property. +func (s *Schema) AddProperty(name string, property *Schema) { + *s.Properties = append(*s.Properties, NewNamedSchema(name, property)) +} diff --git a/vendor/github.com/google/gnostic-models/jsonschema/operations.go b/vendor/github.com/google/gnostic-models/jsonschema/operations.go new file mode 100644 index 00000000..ba8dd4a9 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/jsonschema/operations.go @@ -0,0 +1,394 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonschema + +import ( + "fmt" + "log" + "strings" +) + +// +// OPERATIONS +// The following methods perform operations on Schemas. +// + +// IsEmpty returns true if no members of the Schema are specified. +func (schema *Schema) IsEmpty() bool { + return (schema.Schema == nil) && + (schema.ID == nil) && + (schema.MultipleOf == nil) && + (schema.Maximum == nil) && + (schema.ExclusiveMaximum == nil) && + (schema.Minimum == nil) && + (schema.ExclusiveMinimum == nil) && + (schema.MaxLength == nil) && + (schema.MinLength == nil) && + (schema.Pattern == nil) && + (schema.AdditionalItems == nil) && + (schema.Items == nil) && + (schema.MaxItems == nil) && + (schema.MinItems == nil) && + (schema.UniqueItems == nil) && + (schema.MaxProperties == nil) && + (schema.MinProperties == nil) && + (schema.Required == nil) && + (schema.AdditionalProperties == nil) && + (schema.Properties == nil) && + (schema.PatternProperties == nil) && + (schema.Dependencies == nil) && + (schema.Enumeration == nil) && + (schema.Type == nil) && + (schema.AllOf == nil) && + (schema.AnyOf == nil) && + (schema.OneOf == nil) && + (schema.Not == nil) && + (schema.Definitions == nil) && + (schema.Title == nil) && + (schema.Description == nil) && + (schema.Default == nil) && + (schema.Format == nil) && + (schema.Ref == nil) +} + +// IsEqual returns true if two schemas are equal. +func (schema *Schema) IsEqual(schema2 *Schema) bool { + return schema.String() == schema2.String() +} + +// SchemaOperation represents a function that can be applied to a Schema. +type SchemaOperation func(schema *Schema, context string) + +// Applies a specified function to a Schema and all of the Schemas that it contains. 
+func (schema *Schema) applyToSchemas(operation SchemaOperation, context string) { + + if schema.AdditionalItems != nil { + s := schema.AdditionalItems.Schema + if s != nil { + s.applyToSchemas(operation, "AdditionalItems") + } + } + + if schema.Items != nil { + if schema.Items.SchemaArray != nil { + for _, s := range *(schema.Items.SchemaArray) { + s.applyToSchemas(operation, "Items.SchemaArray") + } + } else if schema.Items.Schema != nil { + schema.Items.Schema.applyToSchemas(operation, "Items.Schema") + } + } + + if schema.AdditionalProperties != nil { + s := schema.AdditionalProperties.Schema + if s != nil { + s.applyToSchemas(operation, "AdditionalProperties") + } + } + + if schema.Properties != nil { + for _, pair := range *(schema.Properties) { + s := pair.Value + s.applyToSchemas(operation, "Properties") + } + } + if schema.PatternProperties != nil { + for _, pair := range *(schema.PatternProperties) { + s := pair.Value + s.applyToSchemas(operation, "PatternProperties") + } + } + + if schema.Dependencies != nil { + for _, pair := range *(schema.Dependencies) { + schemaOrStringArray := pair.Value + s := schemaOrStringArray.Schema + if s != nil { + s.applyToSchemas(operation, "Dependencies") + } + } + } + + if schema.AllOf != nil { + for _, s := range *(schema.AllOf) { + s.applyToSchemas(operation, "AllOf") + } + } + if schema.AnyOf != nil { + for _, s := range *(schema.AnyOf) { + s.applyToSchemas(operation, "AnyOf") + } + } + if schema.OneOf != nil { + for _, s := range *(schema.OneOf) { + s.applyToSchemas(operation, "OneOf") + } + } + if schema.Not != nil { + schema.Not.applyToSchemas(operation, "Not") + } + + if schema.Definitions != nil { + for _, pair := range *(schema.Definitions) { + s := pair.Value + s.applyToSchemas(operation, "Definitions") + } + } + + operation(schema, context) +} + +// CopyProperties copies all non-nil properties from the source Schema to the schema Schema. 
+func (schema *Schema) CopyProperties(source *Schema) { + if source.Schema != nil { + schema.Schema = source.Schema + } + if source.ID != nil { + schema.ID = source.ID + } + if source.MultipleOf != nil { + schema.MultipleOf = source.MultipleOf + } + if source.Maximum != nil { + schema.Maximum = source.Maximum + } + if source.ExclusiveMaximum != nil { + schema.ExclusiveMaximum = source.ExclusiveMaximum + } + if source.Minimum != nil { + schema.Minimum = source.Minimum + } + if source.ExclusiveMinimum != nil { + schema.ExclusiveMinimum = source.ExclusiveMinimum + } + if source.MaxLength != nil { + schema.MaxLength = source.MaxLength + } + if source.MinLength != nil { + schema.MinLength = source.MinLength + } + if source.Pattern != nil { + schema.Pattern = source.Pattern + } + if source.AdditionalItems != nil { + schema.AdditionalItems = source.AdditionalItems + } + if source.Items != nil { + schema.Items = source.Items + } + if source.MaxItems != nil { + schema.MaxItems = source.MaxItems + } + if source.MinItems != nil { + schema.MinItems = source.MinItems + } + if source.UniqueItems != nil { + schema.UniqueItems = source.UniqueItems + } + if source.MaxProperties != nil { + schema.MaxProperties = source.MaxProperties + } + if source.MinProperties != nil { + schema.MinProperties = source.MinProperties + } + if source.Required != nil { + schema.Required = source.Required + } + if source.AdditionalProperties != nil { + schema.AdditionalProperties = source.AdditionalProperties + } + if source.Properties != nil { + schema.Properties = source.Properties + } + if source.PatternProperties != nil { + schema.PatternProperties = source.PatternProperties + } + if source.Dependencies != nil { + schema.Dependencies = source.Dependencies + } + if source.Enumeration != nil { + schema.Enumeration = source.Enumeration + } + if source.Type != nil { + schema.Type = source.Type + } + if source.AllOf != nil { + schema.AllOf = source.AllOf + } + if source.AnyOf != nil { + schema.AnyOf = source.AnyOf + } + if source.OneOf != nil { + schema.OneOf = source.OneOf + } + if source.Not != nil { + schema.Not = source.Not + } + if source.Definitions != nil { + schema.Definitions = source.Definitions + } + if source.Title != nil { + schema.Title = source.Title + } + if source.Description != nil { + schema.Description = source.Description + } + if source.Default != nil { + schema.Default = source.Default + } + if source.Format != nil { + schema.Format = source.Format + } + if source.Ref != nil { + schema.Ref = source.Ref + } +} + +// TypeIs returns true if the Type of a Schema includes the specified type +func (schema *Schema) TypeIs(typeName string) bool { + if schema.Type != nil { + // the schema Type is either a string or an array of strings + if schema.Type.String != nil { + return (*(schema.Type.String) == typeName) + } else if schema.Type.StringArray != nil { + for _, n := range *(schema.Type.StringArray) { + if n == typeName { + return true + } + } + } + } + return false +} + +// ResolveRefs resolves "$ref" elements in a Schema and its children. +// But if a reference refers to an object type, is inside a oneOf, or contains a oneOf, +// the reference is kept and we expect downstream tools to separately model these +// referenced schemas. 
+func (schema *Schema) ResolveRefs() { + rootSchema := schema + count := 1 + for count > 0 { + count = 0 + schema.applyToSchemas( + func(schema *Schema, context string) { + if schema.Ref != nil { + resolvedRef, err := rootSchema.resolveJSONPointer(*(schema.Ref)) + if err != nil { + log.Printf("%+v", err) + } else if resolvedRef.TypeIs("object") { + // don't substitute for objects, we'll model the referenced schema with a class + } else if context == "OneOf" { + // don't substitute for references inside oneOf declarations + } else if resolvedRef.OneOf != nil { + // don't substitute for references that contain oneOf declarations + } else if resolvedRef.AdditionalProperties != nil { + // don't substitute for references that look like objects + } else { + schema.Ref = nil + schema.CopyProperties(resolvedRef) + count++ + } + } + }, "") + } +} + +// resolveJSONPointer resolves JSON pointers. +// This current implementation is very crude and custom for OpenAPI 2.0 schemas. +// It panics for any pointer that it is unable to resolve. +func (schema *Schema) resolveJSONPointer(ref string) (result *Schema, err error) { + parts := strings.Split(ref, "#") + if len(parts) == 2 { + documentName := parts[0] + "#" + if documentName == "#" && schema.ID != nil { + documentName = *(schema.ID) + } + path := parts[1] + document := schemas[documentName] + pathParts := strings.Split(path, "/") + + // we currently do a very limited (hard-coded) resolution of certain paths and log errors for missed cases + if len(pathParts) == 1 { + return document, nil + } else if len(pathParts) == 3 { + switch pathParts[1] { + case "definitions": + dictionary := document.Definitions + for _, pair := range *dictionary { + if pair.Name == pathParts[2] { + result = pair.Value + } + } + case "properties": + dictionary := document.Properties + for _, pair := range *dictionary { + if pair.Name == pathParts[2] { + result = pair.Value + } + } + default: + break + } + } + } + if result == nil { + return nil, fmt.Errorf("unresolved pointer: %+v", ref) + } + return result, nil +} + +// ResolveAllOfs replaces "allOf" elements by merging their properties into the parent Schema. +func (schema *Schema) ResolveAllOfs() { + schema.applyToSchemas( + func(schema *Schema, context string) { + if schema.AllOf != nil { + for _, allOf := range *(schema.AllOf) { + schema.CopyProperties(allOf) + } + schema.AllOf = nil + } + }, "resolveAllOfs") +} + +// ResolveAnyOfs replaces all "anyOf" elements with "oneOf". 
+func (schema *Schema) ResolveAnyOfs() { + schema.applyToSchemas( + func(schema *Schema, context string) { + if schema.AnyOf != nil { + schema.OneOf = schema.AnyOf + schema.AnyOf = nil + } + }, "resolveAnyOfs") +} + +// return a pointer to a copy of a passed-in string +func stringptr(input string) (output *string) { + return &input +} + +// CopyOfficialSchemaProperty copies a named property from the official JSON Schema definition +func (schema *Schema) CopyOfficialSchemaProperty(name string) { + *schema.Properties = append(*schema.Properties, + NewNamedSchema(name, + &Schema{Ref: stringptr("http://json-schema.org/draft-04/schema#/properties/" + name)})) +} + +// CopyOfficialSchemaProperties copies named properties from the official JSON Schema definition +func (schema *Schema) CopyOfficialSchemaProperties(names []string) { + for _, name := range names { + schema.CopyOfficialSchemaProperty(name) + } +} diff --git a/vendor/github.com/google/gnostic-models/jsonschema/reader.go b/vendor/github.com/google/gnostic-models/jsonschema/reader.go new file mode 100644 index 00000000..b8583d46 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/jsonschema/reader.go @@ -0,0 +1,442 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate go run generate-base.go + +package jsonschema + +import ( + "fmt" + "io/ioutil" + "strconv" + + "gopkg.in/yaml.v3" +) + +// This is a global map of all known Schemas. +// It is initialized when the first Schema is created and inserted. +var schemas map[string]*Schema + +// NewBaseSchema builds a schema object from an embedded json representation. +func NewBaseSchema() (schema *Schema, err error) { + b, err := baseSchemaBytes() + if err != nil { + return nil, err + } + var node yaml.Node + err = yaml.Unmarshal(b, &node) + if err != nil { + return nil, err + } + return NewSchemaFromObject(&node), nil +} + +// NewSchemaFromFile reads a schema from a file. +// Currently this assumes that schemas are stored in the source distribution of this project. +func NewSchemaFromFile(filename string) (schema *Schema, err error) { + file, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + var node yaml.Node + err = yaml.Unmarshal(file, &node) + if err != nil { + return nil, err + } + return NewSchemaFromObject(&node), nil +} + +// NewSchemaFromObject constructs a schema from a parsed JSON object. +// Due to the complexity of the schema representation, this is a +// custom reader and not the standard Go JSON reader (encoding/json). 
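+//
+// Editorial sketch (an illustration, not part of the upstream file): callers
+// typically unmarshal JSON or YAML text into a yaml.Node with gopkg.in/yaml.v3
+// and hand the node to this constructor, as NewSchemaFromFile does above.
+//
+//    var node yaml.Node
+//    if err := yaml.Unmarshal([]byte(`{"type": "string", "minLength": 1}`), &node); err != nil {
+//        panic(err)
+//    }
+//    schema := NewSchemaFromObject(&node)
+//    fmt.Println(schema.TypeIs("string")) // true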
+func NewSchemaFromObject(jsonData *yaml.Node) *Schema { + switch jsonData.Kind { + case yaml.DocumentNode: + return NewSchemaFromObject(jsonData.Content[0]) + case yaml.MappingNode: + schema := &Schema{} + + for i := 0; i < len(jsonData.Content); i += 2 { + k := jsonData.Content[i].Value + v := jsonData.Content[i+1] + + switch k { + case "$schema": + schema.Schema = schema.stringValue(v) + case "id": + schema.ID = schema.stringValue(v) + + case "multipleOf": + schema.MultipleOf = schema.numberValue(v) + case "maximum": + schema.Maximum = schema.numberValue(v) + case "exclusiveMaximum": + schema.ExclusiveMaximum = schema.boolValue(v) + case "minimum": + schema.Minimum = schema.numberValue(v) + case "exclusiveMinimum": + schema.ExclusiveMinimum = schema.boolValue(v) + + case "maxLength": + schema.MaxLength = schema.intValue(v) + case "minLength": + schema.MinLength = schema.intValue(v) + case "pattern": + schema.Pattern = schema.stringValue(v) + + case "additionalItems": + schema.AdditionalItems = schema.schemaOrBooleanValue(v) + case "items": + schema.Items = schema.schemaOrSchemaArrayValue(v) + case "maxItems": + schema.MaxItems = schema.intValue(v) + case "minItems": + schema.MinItems = schema.intValue(v) + case "uniqueItems": + schema.UniqueItems = schema.boolValue(v) + + case "maxProperties": + schema.MaxProperties = schema.intValue(v) + case "minProperties": + schema.MinProperties = schema.intValue(v) + case "required": + schema.Required = schema.arrayOfStringsValue(v) + case "additionalProperties": + schema.AdditionalProperties = schema.schemaOrBooleanValue(v) + case "properties": + schema.Properties = schema.mapOfSchemasValue(v) + case "patternProperties": + schema.PatternProperties = schema.mapOfSchemasValue(v) + case "dependencies": + schema.Dependencies = schema.mapOfSchemasOrStringArraysValue(v) + + case "enum": + schema.Enumeration = schema.arrayOfEnumValuesValue(v) + + case "type": + schema.Type = schema.stringOrStringArrayValue(v) + case "allOf": + schema.AllOf = schema.arrayOfSchemasValue(v) + case "anyOf": + schema.AnyOf = schema.arrayOfSchemasValue(v) + case "oneOf": + schema.OneOf = schema.arrayOfSchemasValue(v) + case "not": + schema.Not = NewSchemaFromObject(v) + case "definitions": + schema.Definitions = schema.mapOfSchemasValue(v) + + case "title": + schema.Title = schema.stringValue(v) + case "description": + schema.Description = schema.stringValue(v) + + case "default": + schema.Default = v + + case "format": + schema.Format = schema.stringValue(v) + case "$ref": + schema.Ref = schema.stringValue(v) + default: + fmt.Printf("UNSUPPORTED (%s)\n", k) + } + } + + // insert schema in global map + if schema.ID != nil { + if schemas == nil { + schemas = make(map[string]*Schema, 0) + } + schemas[*(schema.ID)] = schema + } + return schema + + default: + fmt.Printf("schemaValue: unexpected node %+v\n", jsonData) + return nil + } + + return nil +} + +// +// BUILDERS +// The following methods build elements of Schemas from interface{} values. +// Each returns nil if it is unable to build the desired element. +// + +// Gets the string value of an interface{} value if possible. +func (schema *Schema) stringValue(v *yaml.Node) *string { + switch v.Kind { + case yaml.ScalarNode: + return &v.Value + default: + fmt.Printf("stringValue: unexpected node %+v\n", v) + } + return nil +} + +// Gets the numeric value of an interface{} value if possible. 
+func (schema *Schema) numberValue(v *yaml.Node) *SchemaNumber { + number := &SchemaNumber{} + switch v.Kind { + case yaml.ScalarNode: + switch v.Tag { + case "!!float": + v2, _ := strconv.ParseFloat(v.Value, 64) + number.Float = &v2 + return number + case "!!int": + v2, _ := strconv.ParseInt(v.Value, 10, 64) + number.Integer = &v2 + return number + default: + fmt.Printf("stringValue: unexpected node %+v\n", v) + } + default: + fmt.Printf("stringValue: unexpected node %+v\n", v) + } + return nil +} + +// Gets the integer value of an interface{} value if possible. +func (schema *Schema) intValue(v *yaml.Node) *int64 { + switch v.Kind { + case yaml.ScalarNode: + switch v.Tag { + case "!!float": + v2, _ := strconv.ParseFloat(v.Value, 64) + v3 := int64(v2) + return &v3 + case "!!int": + v2, _ := strconv.ParseInt(v.Value, 10, 64) + return &v2 + default: + fmt.Printf("intValue: unexpected node %+v\n", v) + } + default: + fmt.Printf("intValue: unexpected node %+v\n", v) + } + return nil +} + +// Gets the bool value of an interface{} value if possible. +func (schema *Schema) boolValue(v *yaml.Node) *bool { + switch v.Kind { + case yaml.ScalarNode: + switch v.Tag { + case "!!bool": + v2, _ := strconv.ParseBool(v.Value) + return &v2 + default: + fmt.Printf("boolValue: unexpected node %+v\n", v) + } + default: + fmt.Printf("boolValue: unexpected node %+v\n", v) + } + return nil +} + +// Gets a map of Schemas from an interface{} value if possible. +func (schema *Schema) mapOfSchemasValue(v *yaml.Node) *[]*NamedSchema { + switch v.Kind { + case yaml.MappingNode: + m := make([]*NamedSchema, 0) + for i := 0; i < len(v.Content); i += 2 { + k2 := v.Content[i].Value + v2 := v.Content[i+1] + pair := &NamedSchema{Name: k2, Value: NewSchemaFromObject(v2)} + m = append(m, pair) + } + return &m + default: + fmt.Printf("mapOfSchemasValue: unexpected node %+v\n", v) + } + return nil +} + +// Gets an array of Schemas from an interface{} value if possible. +func (schema *Schema) arrayOfSchemasValue(v *yaml.Node) *[]*Schema { + switch v.Kind { + case yaml.SequenceNode: + m := make([]*Schema, 0) + for _, v2 := range v.Content { + switch v2.Kind { + case yaml.MappingNode: + s := NewSchemaFromObject(v2) + m = append(m, s) + default: + fmt.Printf("arrayOfSchemasValue: unexpected node %+v\n", v2) + } + } + return &m + case yaml.MappingNode: + m := make([]*Schema, 0) + s := NewSchemaFromObject(v) + m = append(m, s) + return &m + default: + fmt.Printf("arrayOfSchemasValue: unexpected node %+v\n", v) + } + return nil +} + +// Gets a Schema or an array of Schemas from an interface{} value if possible. +func (schema *Schema) schemaOrSchemaArrayValue(v *yaml.Node) *SchemaOrSchemaArray { + switch v.Kind { + case yaml.SequenceNode: + m := make([]*Schema, 0) + for _, v2 := range v.Content { + switch v2.Kind { + case yaml.MappingNode: + s := NewSchemaFromObject(v2) + m = append(m, s) + default: + fmt.Printf("schemaOrSchemaArrayValue: unexpected node %+v\n", v2) + } + } + return &SchemaOrSchemaArray{SchemaArray: &m} + case yaml.MappingNode: + s := NewSchemaFromObject(v) + return &SchemaOrSchemaArray{Schema: s} + default: + fmt.Printf("schemaOrSchemaArrayValue: unexpected node %+v\n", v) + } + return nil +} + +// Gets an array of strings from an interface{} value if possible. 
+func (schema *Schema) arrayOfStringsValue(v *yaml.Node) *[]string { + switch v.Kind { + case yaml.ScalarNode: + a := []string{v.Value} + return &a + case yaml.SequenceNode: + a := make([]string, 0) + for _, v2 := range v.Content { + switch v2.Kind { + case yaml.ScalarNode: + a = append(a, v2.Value) + default: + fmt.Printf("arrayOfStringsValue: unexpected node %+v\n", v2) + } + } + return &a + default: + fmt.Printf("arrayOfStringsValue: unexpected node %+v\n", v) + } + return nil +} + +// Gets a string or an array of strings from an interface{} value if possible. +func (schema *Schema) stringOrStringArrayValue(v *yaml.Node) *StringOrStringArray { + switch v.Kind { + case yaml.ScalarNode: + s := &StringOrStringArray{} + s.String = &v.Value + return s + case yaml.SequenceNode: + a := make([]string, 0) + for _, v2 := range v.Content { + switch v2.Kind { + case yaml.ScalarNode: + a = append(a, v2.Value) + default: + fmt.Printf("arrayOfStringsValue: unexpected node %+v\n", v2) + } + } + s := &StringOrStringArray{} + s.StringArray = &a + return s + default: + fmt.Printf("arrayOfStringsValue: unexpected node %+v\n", v) + } + return nil +} + +// Gets an array of enum values from an interface{} value if possible. +func (schema *Schema) arrayOfEnumValuesValue(v *yaml.Node) *[]SchemaEnumValue { + a := make([]SchemaEnumValue, 0) + switch v.Kind { + case yaml.SequenceNode: + for _, v2 := range v.Content { + switch v2.Kind { + case yaml.ScalarNode: + switch v2.Tag { + case "!!str": + a = append(a, SchemaEnumValue{String: &v2.Value}) + case "!!bool": + v3, _ := strconv.ParseBool(v2.Value) + a = append(a, SchemaEnumValue{Bool: &v3}) + default: + fmt.Printf("arrayOfEnumValuesValue: unexpected type %s\n", v2.Tag) + } + default: + fmt.Printf("arrayOfEnumValuesValue: unexpected node %+v\n", v2) + } + } + default: + fmt.Printf("arrayOfEnumValuesValue: unexpected node %+v\n", v) + } + return &a +} + +// Gets a map of schemas or string arrays from an interface{} value if possible. +func (schema *Schema) mapOfSchemasOrStringArraysValue(v *yaml.Node) *[]*NamedSchemaOrStringArray { + m := make([]*NamedSchemaOrStringArray, 0) + switch v.Kind { + case yaml.MappingNode: + for i := 0; i < len(v.Content); i += 2 { + k2 := v.Content[i].Value + v2 := v.Content[i+1] + switch v2.Kind { + case yaml.SequenceNode: + a := make([]string, 0) + for _, v3 := range v2.Content { + switch v3.Kind { + case yaml.ScalarNode: + a = append(a, v3.Value) + default: + fmt.Printf("mapOfSchemasOrStringArraysValue: unexpected node %+v\n", v3) + } + } + s := &SchemaOrStringArray{} + s.StringArray = &a + pair := &NamedSchemaOrStringArray{Name: k2, Value: s} + m = append(m, pair) + default: + fmt.Printf("mapOfSchemasOrStringArraysValue: unexpected node %+v\n", v2) + } + } + default: + fmt.Printf("mapOfSchemasOrStringArraysValue: unexpected node %+v\n", v) + } + return &m +} + +// Gets a schema or a boolean value from an interface{} value if possible. 
+func (schema *Schema) schemaOrBooleanValue(v *yaml.Node) *SchemaOrBoolean { + schemaOrBoolean := &SchemaOrBoolean{} + switch v.Kind { + case yaml.ScalarNode: + v2, _ := strconv.ParseBool(v.Value) + schemaOrBoolean.Boolean = &v2 + case yaml.MappingNode: + schemaOrBoolean.Schema = NewSchemaFromObject(v) + default: + fmt.Printf("schemaOrBooleanValue: unexpected node %+v\n", v) + } + return schemaOrBoolean +} diff --git a/vendor/github.com/google/gnostic-models/jsonschema/schema.json b/vendor/github.com/google/gnostic-models/jsonschema/schema.json new file mode 100644 index 00000000..85eb502a --- /dev/null +++ b/vendor/github.com/google/gnostic-models/jsonschema/schema.json @@ -0,0 +1,150 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uri" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "allOf": 
{ "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {} +} diff --git a/vendor/github.com/google/gnostic-models/jsonschema/writer.go b/vendor/github.com/google/gnostic-models/jsonschema/writer.go new file mode 100644 index 00000000..340dc5f9 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/jsonschema/writer.go @@ -0,0 +1,369 @@ +// Copyright 2017 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package jsonschema + +import ( + "fmt" + + "gopkg.in/yaml.v3" +) + +const indentation = " " + +func renderMappingNode(node *yaml.Node, indent string) (result string) { + result = "{\n" + innerIndent := indent + indentation + for i := 0; i < len(node.Content); i += 2 { + // first print the key + key := node.Content[i].Value + result += fmt.Sprintf("%s\"%+v\": ", innerIndent, key) + // then the value + value := node.Content[i+1] + switch value.Kind { + case yaml.ScalarNode: + result += "\"" + value.Value + "\"" + case yaml.MappingNode: + result += renderMappingNode(value, innerIndent) + case yaml.SequenceNode: + result += renderSequenceNode(value, innerIndent) + default: + result += fmt.Sprintf("???MapItem(Key:%+v, Value:%T)", value, value) + } + if i < len(node.Content)-2 { + result += "," + } + result += "\n" + } + + result += indent + "}" + return result +} + +func renderSequenceNode(node *yaml.Node, indent string) (result string) { + result = "[\n" + innerIndent := indent + indentation + for i := 0; i < len(node.Content); i++ { + item := node.Content[i] + switch item.Kind { + case yaml.ScalarNode: + result += innerIndent + "\"" + item.Value + "\"" + case yaml.MappingNode: + result += innerIndent + renderMappingNode(item, innerIndent) + "" + default: + result += innerIndent + fmt.Sprintf("???ArrayItem(%+v)", item) + } + if i < len(node.Content)-1 { + result += "," + } + result += "\n" + } + result += indent + "]" + return result +} + +func renderStringArray(array []string, indent string) (result string) { + result = "[\n" + innerIndent := indent + indentation + for i, item := range array { + result += innerIndent + "\"" + item + "\"" + if i < len(array)-1 { + result += "," + } + result += "\n" + } + result += indent + "]" + return result +} + +// Render renders a yaml.Node as JSON +func Render(node *yaml.Node) string { + if node.Kind == yaml.DocumentNode { + if len(node.Content) == 1 { + return Render(node.Content[0]) + } + } else if node.Kind == yaml.MappingNode { + return renderMappingNode(node, "") + "\n" + } else if node.Kind == yaml.SequenceNode { + return renderSequenceNode(node, "") + "\n" + } + return "" +} + +func (object *SchemaNumber) nodeValue() *yaml.Node { + if object.Integer != nil { + return nodeForInt64(*object.Integer) + } else if object.Float != nil { + return nodeForFloat64(*object.Float) + } else 
{ + return nil + } +} + +func (object *SchemaOrBoolean) nodeValue() *yaml.Node { + if object.Schema != nil { + return object.Schema.nodeValue() + } else if object.Boolean != nil { + return nodeForBoolean(*object.Boolean) + } else { + return nil + } +} + +func nodeForStringArray(array []string) *yaml.Node { + content := make([]*yaml.Node, 0) + for _, item := range array { + content = append(content, nodeForString(item)) + } + return nodeForSequence(content) +} + +func nodeForSchemaArray(array []*Schema) *yaml.Node { + content := make([]*yaml.Node, 0) + for _, item := range array { + content = append(content, item.nodeValue()) + } + return nodeForSequence(content) +} + +func (object *StringOrStringArray) nodeValue() *yaml.Node { + if object.String != nil { + return nodeForString(*object.String) + } else if object.StringArray != nil { + return nodeForStringArray(*(object.StringArray)) + } else { + return nil + } +} + +func (object *SchemaOrStringArray) nodeValue() *yaml.Node { + if object.Schema != nil { + return object.Schema.nodeValue() + } else if object.StringArray != nil { + return nodeForStringArray(*(object.StringArray)) + } else { + return nil + } +} + +func (object *SchemaOrSchemaArray) nodeValue() *yaml.Node { + if object.Schema != nil { + return object.Schema.nodeValue() + } else if object.SchemaArray != nil { + return nodeForSchemaArray(*(object.SchemaArray)) + } else { + return nil + } +} + +func (object *SchemaEnumValue) nodeValue() *yaml.Node { + if object.String != nil { + return nodeForString(*object.String) + } else if object.Bool != nil { + return nodeForBoolean(*object.Bool) + } else { + return nil + } +} + +func nodeForNamedSchemaArray(array *[]*NamedSchema) *yaml.Node { + content := make([]*yaml.Node, 0) + for _, pair := range *(array) { + content = appendPair(content, pair.Name, pair.Value.nodeValue()) + } + return nodeForMapping(content) +} + +func nodeForNamedSchemaOrStringArray(array *[]*NamedSchemaOrStringArray) *yaml.Node { + content := make([]*yaml.Node, 0) + for _, pair := range *(array) { + content = appendPair(content, pair.Name, pair.Value.nodeValue()) + } + return nodeForMapping(content) +} + +func nodeForSchemaEnumArray(array *[]SchemaEnumValue) *yaml.Node { + content := make([]*yaml.Node, 0) + for _, item := range *array { + content = append(content, item.nodeValue()) + } + return nodeForSequence(content) +} + +func nodeForMapping(content []*yaml.Node) *yaml.Node { + return &yaml.Node{ + Kind: yaml.MappingNode, + Content: content, + } +} + +func nodeForSequence(content []*yaml.Node) *yaml.Node { + return &yaml.Node{ + Kind: yaml.SequenceNode, + Content: content, + } +} + +func nodeForString(value string) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: value, + } +} + +func nodeForBoolean(value bool) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!bool", + Value: fmt.Sprintf("%t", value), + } +} + +func nodeForInt64(value int64) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!int", + Value: fmt.Sprintf("%d", value), + } +} + +func nodeForFloat64(value float64) *yaml.Node { + return &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!float", + Value: fmt.Sprintf("%f", value), + } +} + +func appendPair(nodes []*yaml.Node, name string, value *yaml.Node) []*yaml.Node { + nodes = append(nodes, nodeForString(name)) + nodes = append(nodes, value) + return nodes +} + +func (schema *Schema) nodeValue() *yaml.Node { + n := &yaml.Node{Kind: yaml.MappingNode} + content := make([]*yaml.Node, 0) + if 
schema.Title != nil { + content = appendPair(content, "title", nodeForString(*schema.Title)) + } + if schema.ID != nil { + content = appendPair(content, "id", nodeForString(*schema.ID)) + } + if schema.Schema != nil { + content = appendPair(content, "$schema", nodeForString(*schema.Schema)) + } + if schema.Type != nil { + content = appendPair(content, "type", schema.Type.nodeValue()) + } + if schema.Items != nil { + content = appendPair(content, "items", schema.Items.nodeValue()) + } + if schema.Description != nil { + content = appendPair(content, "description", nodeForString(*schema.Description)) + } + if schema.Required != nil { + content = appendPair(content, "required", nodeForStringArray(*schema.Required)) + } + if schema.AdditionalProperties != nil { + content = appendPair(content, "additionalProperties", schema.AdditionalProperties.nodeValue()) + } + if schema.PatternProperties != nil { + content = appendPair(content, "patternProperties", nodeForNamedSchemaArray(schema.PatternProperties)) + } + if schema.Properties != nil { + content = appendPair(content, "properties", nodeForNamedSchemaArray(schema.Properties)) + } + if schema.Dependencies != nil { + content = appendPair(content, "dependencies", nodeForNamedSchemaOrStringArray(schema.Dependencies)) + } + if schema.Ref != nil { + content = appendPair(content, "$ref", nodeForString(*schema.Ref)) + } + if schema.MultipleOf != nil { + content = appendPair(content, "multipleOf", schema.MultipleOf.nodeValue()) + } + if schema.Maximum != nil { + content = appendPair(content, "maximum", schema.Maximum.nodeValue()) + } + if schema.ExclusiveMaximum != nil { + content = appendPair(content, "exclusiveMaximum", nodeForBoolean(*schema.ExclusiveMaximum)) + } + if schema.Minimum != nil { + content = appendPair(content, "minimum", schema.Minimum.nodeValue()) + } + if schema.ExclusiveMinimum != nil { + content = appendPair(content, "exclusiveMinimum", nodeForBoolean(*schema.ExclusiveMinimum)) + } + if schema.MaxLength != nil { + content = appendPair(content, "maxLength", nodeForInt64(*schema.MaxLength)) + } + if schema.MinLength != nil { + content = appendPair(content, "minLength", nodeForInt64(*schema.MinLength)) + } + if schema.Pattern != nil { + content = appendPair(content, "pattern", nodeForString(*schema.Pattern)) + } + if schema.AdditionalItems != nil { + content = appendPair(content, "additionalItems", schema.AdditionalItems.nodeValue()) + } + if schema.MaxItems != nil { + content = appendPair(content, "maxItems", nodeForInt64(*schema.MaxItems)) + } + if schema.MinItems != nil { + content = appendPair(content, "minItems", nodeForInt64(*schema.MinItems)) + } + if schema.UniqueItems != nil { + content = appendPair(content, "uniqueItems", nodeForBoolean(*schema.UniqueItems)) + } + if schema.MaxProperties != nil { + content = appendPair(content, "maxProperties", nodeForInt64(*schema.MaxProperties)) + } + if schema.MinProperties != nil { + content = appendPair(content, "minProperties", nodeForInt64(*schema.MinProperties)) + } + if schema.Enumeration != nil { + content = appendPair(content, "enum", nodeForSchemaEnumArray(schema.Enumeration)) + } + if schema.AllOf != nil { + content = appendPair(content, "allOf", nodeForSchemaArray(*schema.AllOf)) + } + if schema.AnyOf != nil { + content = appendPair(content, "anyOf", nodeForSchemaArray(*schema.AnyOf)) + } + if schema.OneOf != nil { + content = appendPair(content, "oneOf", nodeForSchemaArray(*schema.OneOf)) + } + if schema.Not != nil { + content = appendPair(content, "not", schema.Not.nodeValue()) 
+ } + if schema.Definitions != nil { + content = appendPair(content, "definitions", nodeForNamedSchemaArray(schema.Definitions)) + } + if schema.Default != nil { + // m = append(m, yaml.MapItem{Key: "default", Value: *schema.Default}) + } + if schema.Format != nil { + content = appendPair(content, "format", nodeForString(*schema.Format)) + } + n.Content = content + return n +} + +// JSONString returns a json representation of a schema. +func (schema *Schema) JSONString() string { + node := schema.nodeValue() + return Render(node) +} diff --git a/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.go b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.go new file mode 100644 index 00000000..d71fe6d5 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.go @@ -0,0 +1,8820 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +package openapi_v2 + +import ( + "fmt" + "regexp" + "strings" + + "gopkg.in/yaml.v3" + + "github.com/google/gnostic-models/compiler" +) + +// Version returns the package name (and OpenAPI version). +func Version() string { + return "openapi_v2" +} + +// NewAdditionalPropertiesItem creates an object of type AdditionalPropertiesItem if possible, returning an error if not. +func NewAdditionalPropertiesItem(in *yaml.Node, context *compiler.Context) (*AdditionalPropertiesItem, error) { + errors := make([]error, 0) + x := &AdditionalPropertiesItem{} + matched := false + // Schema schema = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchema(m, compiler.NewContext("schema", m, context)) + if matchingError == nil { + x.Oneof = &AdditionalPropertiesItem_Schema{Schema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // bool boolean = 2; + boolValue, ok := compiler.BoolForScalarNode(in) + if ok { + x.Oneof = &AdditionalPropertiesItem_Boolean{Boolean: boolValue} + matched = true + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid AdditionalPropertiesItem") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewAny creates an object of type Any if possible, returning an error if not. +func NewAny(in *yaml.Node, context *compiler.Context) (*Any, error) { + errors := make([]error, 0) + x := &Any{} + bytes := compiler.Marshal(in) + x.Yaml = string(bytes) + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewApiKeySecurity creates an object of type ApiKeySecurity if possible, returning an error if not. 
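+//
+// Editorial sketch (an illustration, not part of the upstream file): the node
+// passed in is expected to be a mapping like the securityDefinitions entry
+// below, where "type", "name", and "in" are required, "type" must be
+// "apiKey", and "in" must be "header" or "query". The header name shown is
+// hypothetical.
+//
+//    type: apiKey
+//    name: X-API-Key
+//    in: header
+//    description: API key supplied in a request header.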
+func NewApiKeySecurity(in *yaml.Node, context *compiler.Context) (*ApiKeySecurity, error) { + errors := make([]error, 0) + x := &ApiKeySecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "in", "name", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [apiKey] + if ok && !compiler.StringArrayContainsValue([]string{"apiKey"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 2; + v2 := compiler.MapValueForKey(m, "name") + if v2 != nil { + x.Name, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 3; + v3 := compiler.MapValueForKey(m, "in") + if v3 != nil { + x.In, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [header query] + if ok && !compiler.StringArrayContainsValue([]string{"header", "query"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 4; + v4 := compiler.MapValueForKey(m, "description") + if v4 != nil { + x.Description, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 5; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, 
err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewBasicAuthenticationSecurity creates an object of type BasicAuthenticationSecurity if possible, returning an error if not. +func NewBasicAuthenticationSecurity(in *yaml.Node, context *compiler.Context) (*BasicAuthenticationSecurity, error) { + errors := make([]error, 0) + x := &BasicAuthenticationSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [basic] + if ok && !compiler.StringArrayContainsValue([]string{"basic"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 3; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewBodyParameter creates an object of type BodyParameter if possible, returning an error if not. 
+func NewBodyParameter(in *yaml.Node, context *compiler.Context) (*BodyParameter, error) { + errors := make([]error, 0) + x := &BodyParameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name", "schema"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "in", "name", "required", "schema"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 2; + v2 := compiler.MapValueForKey(m, "name") + if v2 != nil { + x.Name, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 3; + v3 := compiler.MapValueForKey(m, "in") + if v3 != nil { + x.In, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [body] + if ok && !compiler.StringArrayContainsValue([]string{"body"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool required = 4; + v4 := compiler.MapValueForKey(m, "required") + if v4 != nil { + x.Required, ok = compiler.BoolForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schema schema = 5; + v5 := compiler.MapValueForKey(m, "schema") + if v5 != nil { + var err error + x.Schema, err = NewSchema(v5, compiler.NewContext("schema", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = 
append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewContact creates an object of type Contact if possible, returning an error if not. +func NewContact(in *yaml.Node, context *compiler.Context) (*Contact, error) { + errors := make([]error, 0) + x := &Contact{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"email", "name", "url"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string email = 3; + v3 := compiler.MapValueForKey(m, "email") + if v3 != nil { + x.Email, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for email: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 4; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDefault creates an object of type Default if possible, returning an error if not. 
+func NewDefault(in *yaml.Node, context *compiler.Context) (*Default, error) { + errors := make([]error, 0) + x := &Default{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDefinitions creates an object of type Definitions if possible, returning an error if not. +func NewDefinitions(in *yaml.Node, context *compiler.Context) (*Definitions, error) { + errors := make([]error, 0) + x := &Definitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchema additional_properties = 1; + // MAP: Schema + x.AdditionalProperties = make([]*NamedSchema, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedSchema{} + pair.Name = k + var err error + pair.Value, err = NewSchema(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewDocument creates an object of type Document if possible, returning an error if not. 
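+//
+// Editorial sketch (an illustration, not part of the upstream file): a
+// minimal document accepted by this constructor must carry the required
+// "swagger", "info", and "paths" keys, for example:
+//
+//    swagger: "2.0"
+//    info:
+//      title: Example API
+//      version: "1.0"
+//    paths: {}
+//
+// In practice callers usually reach this constructor through the package's
+// ParseDocument helper in document.go rather than building the yaml.Node and
+// compiler.Context by hand.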
+func NewDocument(in *yaml.Node, context *compiler.Context) (*Document, error) { + errors := make([]error, 0) + x := &Document{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"info", "paths", "swagger"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"basePath", "consumes", "definitions", "externalDocs", "host", "info", "parameters", "paths", "produces", "responses", "schemes", "security", "securityDefinitions", "swagger", "tags"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string swagger = 1; + v1 := compiler.MapValueForKey(m, "swagger") + if v1 != nil { + x.Swagger, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for swagger: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [2.0] + if ok && !compiler.StringArrayContainsValue([]string{"2.0"}, x.Swagger) { + message := fmt.Sprintf("has unexpected value for swagger: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Info info = 2; + v2 := compiler.MapValueForKey(m, "info") + if v2 != nil { + var err error + x.Info, err = NewInfo(v2, compiler.NewContext("info", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // string host = 3; + v3 := compiler.MapValueForKey(m, "host") + if v3 != nil { + x.Host, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for host: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string base_path = 4; + v4 := compiler.MapValueForKey(m, "basePath") + if v4 != nil { + x.BasePath, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for basePath: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string schemes = 5; + v5 := compiler.MapValueForKey(m, "schemes") + if v5 != nil { + v, ok := compiler.SequenceNodeForNode(v5) + if ok { + x.Schemes = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for schemes: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [http https ws wss] + if ok && !compiler.StringArrayContainsValues([]string{"http", "https", "ws", "wss"}, x.Schemes) { + message := fmt.Sprintf("has unexpected value for schemes: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string consumes = 6; + v6 := compiler.MapValueForKey(m, "consumes") + if v6 != nil { + v, ok := compiler.SequenceNodeForNode(v6) + if ok { + x.Consumes = compiler.StringArrayForSequenceNode(v) + } else { + message := 
fmt.Sprintf("has unexpected value for consumes: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string produces = 7; + v7 := compiler.MapValueForKey(m, "produces") + if v7 != nil { + v, ok := compiler.SequenceNodeForNode(v7) + if ok { + x.Produces = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for produces: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Paths paths = 8; + v8 := compiler.MapValueForKey(m, "paths") + if v8 != nil { + var err error + x.Paths, err = NewPaths(v8, compiler.NewContext("paths", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // Definitions definitions = 9; + v9 := compiler.MapValueForKey(m, "definitions") + if v9 != nil { + var err error + x.Definitions, err = NewDefinitions(v9, compiler.NewContext("definitions", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // ParameterDefinitions parameters = 10; + v10 := compiler.MapValueForKey(m, "parameters") + if v10 != nil { + var err error + x.Parameters, err = NewParameterDefinitions(v10, compiler.NewContext("parameters", v10, context)) + if err != nil { + errors = append(errors, err) + } + } + // ResponseDefinitions responses = 11; + v11 := compiler.MapValueForKey(m, "responses") + if v11 != nil { + var err error + x.Responses, err = NewResponseDefinitions(v11, compiler.NewContext("responses", v11, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated SecurityRequirement security = 12; + v12 := compiler.MapValueForKey(m, "security") + if v12 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := compiler.SequenceNodeForNode(v12) + if ok { + for _, item := range a.Content { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Security = append(x.Security, y) + } + } + } + // SecurityDefinitions security_definitions = 13; + v13 := compiler.MapValueForKey(m, "securityDefinitions") + if v13 != nil { + var err error + x.SecurityDefinitions, err = NewSecurityDefinitions(v13, compiler.NewContext("securityDefinitions", v13, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Tag tags = 14; + v14 := compiler.MapValueForKey(m, "tags") + if v14 != nil { + // repeated Tag + x.Tags = make([]*Tag, 0) + a, ok := compiler.SequenceNodeForNode(v14) + if ok { + for _, item := range a.Content { + y, err := NewTag(item, compiler.NewContext("tags", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Tags = append(x.Tags, y) + } + } + } + // ExternalDocs external_docs = 15; + v15 := compiler.MapValueForKey(m, "externalDocs") + if v15 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v15, compiler.NewContext("externalDocs", v15, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 16; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + 
result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExamples creates an object of type Examples if possible, returning an error if not. +func NewExamples(in *yaml.Node, context *compiler.Context) (*Examples, error) { + errors := make([]error, 0) + x := &Examples{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewExternalDocs creates an object of type ExternalDocs if possible, returning an error if not. +func NewExternalDocs(in *yaml.Node, context *compiler.Context) (*ExternalDocs, error) { + errors := make([]error, 0) + x := &ExternalDocs{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"url"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "url"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 3; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := 
m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewFileSchema creates an object of type FileSchema if possible, returning an error if not. +func NewFileSchema(in *yaml.Node, context *compiler.Context) (*FileSchema, error) { + errors := make([]error, 0) + x := &FileSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"default", "description", "example", "externalDocs", "format", "readOnly", "required", "title", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string format = 1; + v1 := compiler.MapValueForKey(m, "format") + if v1 != nil { + x.Format, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string title = 2; + v2 := compiler.MapValueForKey(m, "title") + if v2 != nil { + x.Title, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 4; + v4 := compiler.MapValueForKey(m, "default") + if v4 != nil { + var err error + x.Default, err = NewAny(v4, compiler.NewContext("default", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated string required = 5; + v5 := compiler.MapValueForKey(m, "required") + if v5 != nil { + v, ok := compiler.SequenceNodeForNode(v5) + if ok { + x.Required = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 6; + v6 := compiler.MapValueForKey(m, "type") + if v6 != nil { + x.Type, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has 
unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [file] + if ok && !compiler.StringArrayContainsValue([]string{"file"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool read_only = 7; + v7 := compiler.MapValueForKey(m, "readOnly") + if v7 != nil { + x.ReadOnly, ok = compiler.BoolForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for readOnly: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 8; + v8 := compiler.MapValueForKey(m, "externalDocs") + if v8 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v8, compiler.NewContext("externalDocs", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 9; + v9 := compiler.MapValueForKey(m, "example") + if v9 != nil { + var err error + x.Example, err = NewAny(v9, compiler.NewContext("example", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 10; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewFormDataParameterSubSchema creates an object of type FormDataParameterSubSchema if possible, returning an error if not. 
+func NewFormDataParameterSubSchema(in *yaml.Node, context *compiler.Context) (*FormDataParameterSubSchema, error) { + errors := make([]error, 0) + x := &FormDataParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowEmptyValue", "collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = compiler.BoolForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [formData] + if ok && !compiler.StringArrayContainsValue([]string{"formData"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 5; + v5 := compiler.MapValueForKey(m, "allowEmptyValue") + if v5 != nil { + x.AllowEmptyValue, ok = compiler.BoolForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 6; + v6 := compiler.MapValueForKey(m, "type") + if v6 != nil { + x.Type, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array file] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array", "file"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 7; + v7 := 
compiler.MapValueForKey(m, "format") + if v7 != nil { + x.Format, ok = compiler.StringForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 8; + v8 := compiler.MapValueForKey(m, "items") + if v8 != nil { + var err error + x.Items, err = NewPrimitivesItems(v8, compiler.NewContext("items", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 9; + v9 := compiler.MapValueForKey(m, "collectionFormat") + if v9 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes multi] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes", "multi"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 10; + v10 := compiler.MapValueForKey(m, "default") + if v10 != nil { + var err error + x.Default, err = NewAny(v10, compiler.NewContext("default", v10, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 11; + v11 := compiler.MapValueForKey(m, "maximum") + if v11 != nil { + v, ok := compiler.FloatForScalarNode(v11) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 12; + v12 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v12 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v12) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 13; + v13 := compiler.MapValueForKey(m, "minimum") + if v13 != nil { + v, ok := compiler.FloatForScalarNode(v13) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 14; + v14 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v14 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v14) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 15; + v15 := compiler.MapValueForKey(m, "maxLength") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 16; + v16 := compiler.MapValueForKey(m, "minLength") + if v16 != nil { + t, ok := compiler.IntForScalarNode(v16) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 17; + v17 := compiler.MapValueForKey(m, 
"pattern") + if v17 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v17) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 18; + v18 := compiler.MapValueForKey(m, "maxItems") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 19; + v19 := compiler.MapValueForKey(m, "minItems") + if v19 != nil { + t, ok := compiler.IntForScalarNode(v19) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 20; + v20 := compiler.MapValueForKey(m, "uniqueItems") + if v20 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v20) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v20)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 21; + v21 := compiler.MapValueForKey(m, "enum") + if v21 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v21) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 22; + v22 := compiler.MapValueForKey(m, "multipleOf") + if v22 != nil { + v, ok := compiler.FloatForScalarNode(v22) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v22)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 23; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeader creates an object of type Header if possible, returning an error if not. 
+func NewHeader(in *yaml.Node, context *compiler.Context) (*Header, error) { + errors := make([]error, 0) + x := &Header{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "pattern", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number integer boolean array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "integer", "boolean", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 2; + v2 := compiler.MapValueForKey(m, "format") + if v2 != nil { + x.Format, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 3; + v3 := compiler.MapValueForKey(m, "items") + if v3 != nil { + var err error + x.Items, err = NewPrimitivesItems(v3, compiler.NewContext("items", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 4; + v4 := compiler.MapValueForKey(m, "collectionFormat") + if v4 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + var err error + x.Default, err = NewAny(v5, compiler.NewContext("default", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 6; + v6 := compiler.MapValueForKey(m, "maximum") + if v6 != nil { + v, ok := compiler.FloatForScalarNode(v6) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for 
maximum: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 7; + v7 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v7 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 8; + v8 := compiler.MapValueForKey(m, "minimum") + if v8 != nil { + v, ok := compiler.FloatForScalarNode(v8) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 9; + v9 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v9 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 10; + v10 := compiler.MapValueForKey(m, "maxLength") + if v10 != nil { + t, ok := compiler.IntForScalarNode(v10) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 11; + v11 := compiler.MapValueForKey(m, "minLength") + if v11 != nil { + t, ok := compiler.IntForScalarNode(v11) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 12; + v12 := compiler.MapValueForKey(m, "pattern") + if v12 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v12) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 13; + v13 := compiler.MapValueForKey(m, "maxItems") + if v13 != nil { + t, ok := compiler.IntForScalarNode(v13) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 14; + v14 := compiler.MapValueForKey(m, "minItems") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 15; + v15 := compiler.MapValueForKey(m, "uniqueItems") + if v15 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v15) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 16; + v16 := compiler.MapValueForKey(m, "enum") + if v16 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v16) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 17; + v17 := 
compiler.MapValueForKey(m, "multipleOf") + if v17 != nil { + v, ok := compiler.FloatForScalarNode(v17) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 18; + v18 := compiler.MapValueForKey(m, "description") + if v18 != nil { + x.Description, ok = compiler.StringForScalarNode(v18) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 19; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeaderParameterSubSchema creates an object of type HeaderParameterSubSchema if possible, returning an error if not. +func NewHeaderParameterSubSchema(in *yaml.Node, context *compiler.Context) (*HeaderParameterSubSchema, error) { + errors := make([]error, 0) + x := &HeaderParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = compiler.BoolForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [header] + if ok && !compiler.StringArrayContainsValue([]string{"header"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != 
nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 5; + v5 := compiler.MapValueForKey(m, "type") + if v5 != nil { + x.Type, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 6; + v6 := compiler.MapValueForKey(m, "format") + if v6 != nil { + x.Format, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 7; + v7 := compiler.MapValueForKey(m, "items") + if v7 != nil { + var err error + x.Items, err = NewPrimitivesItems(v7, compiler.NewContext("items", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 8; + v8 := compiler.MapValueForKey(m, "collectionFormat") + if v8 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v8) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 9; + v9 := compiler.MapValueForKey(m, "default") + if v9 != nil { + var err error + x.Default, err = NewAny(v9, compiler.NewContext("default", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 10; + v10 := compiler.MapValueForKey(m, "maximum") + if v10 != nil { + v, ok := compiler.FloatForScalarNode(v10) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 11; + v11 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v11 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v11) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 12; + v12 := compiler.MapValueForKey(m, "minimum") + if v12 != nil { + v, ok := compiler.FloatForScalarNode(v12) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", 
compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 13; + v13 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v13 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v13) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 14; + v14 := compiler.MapValueForKey(m, "maxLength") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 15; + v15 := compiler.MapValueForKey(m, "minLength") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 16; + v16 := compiler.MapValueForKey(m, "pattern") + if v16 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v16) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 17; + v17 := compiler.MapValueForKey(m, "maxItems") + if v17 != nil { + t, ok := compiler.IntForScalarNode(v17) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 18; + v18 := compiler.MapValueForKey(m, "minItems") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 19; + v19 := compiler.MapValueForKey(m, "uniqueItems") + if v19 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v19) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 20; + v20 := compiler.MapValueForKey(m, "enum") + if v20 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v20) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 21; + v21 := compiler.MapValueForKey(m, "multipleOf") + if v21 != nil { + v, ok := compiler.FloatForScalarNode(v21) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v21)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 22; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := 
compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewHeaders creates an object of type Headers if possible, returning an error if not. +func NewHeaders(in *yaml.Node, context *compiler.Context) (*Headers, error) { + errors := make([]error, 0) + x := &Headers{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedHeader additional_properties = 1; + // MAP: Header + x.AdditionalProperties = make([]*NamedHeader, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedHeader{} + pair.Name = k + var err error + pair.Value, err = NewHeader(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewInfo creates an object of type Info if possible, returning an error if not. +func NewInfo(in *yaml.Node, context *compiler.Context) (*Info, error) { + errors := make([]error, 0) + x := &Info{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"title", "version"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"contact", "description", "license", "termsOfService", "title", "version"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string title = 1; + v1 := compiler.MapValueForKey(m, "title") + if v1 != nil { + x.Title, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string version = 2; + v2 := compiler.MapValueForKey(m, "version") + if v2 != nil { + x.Version, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for version: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // 
string terms_of_service = 4; + v4 := compiler.MapValueForKey(m, "termsOfService") + if v4 != nil { + x.TermsOfService, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for termsOfService: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Contact contact = 5; + v5 := compiler.MapValueForKey(m, "contact") + if v5 != nil { + var err error + x.Contact, err = NewContact(v5, compiler.NewContext("contact", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // License license = 6; + v6 := compiler.MapValueForKey(m, "license") + if v6 != nil { + var err error + x.License, err = NewLicense(v6, compiler.NewContext("license", v6, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 7; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewItemsItem creates an object of type ItemsItem if possible, returning an error if not. +func NewItemsItem(in *yaml.Node, context *compiler.Context) (*ItemsItem, error) { + errors := make([]error, 0) + x := &ItemsItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value for item array: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + x.Schema = make([]*Schema, 0) + y, err := NewSchema(m, compiler.NewContext("", m, context)) + if err != nil { + return nil, err + } + x.Schema = append(x.Schema, y) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewJsonReference creates an object of type JsonReference if possible, returning an error if not. 
+func NewJsonReference(in *yaml.Node, context *compiler.Context) (*JsonReference, error) { + errors := make([]error, 0) + x := &JsonReference{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"$ref"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewLicense creates an object of type License if possible, returning an error if not. +func NewLicense(in *yaml.Node, context *compiler.Context) (*License, error) { + errors := make([]error, 0) + x := &License{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"name", "url"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string url = 2; + v2 := compiler.MapValueForKey(m, "url") + if v2 != nil { + x.Url, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for url: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 3; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = 
string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedAny creates an object of type NamedAny if possible, returning an error if not. +func NewNamedAny(in *yaml.Node, context *compiler.Context) (*NamedAny, error) { + errors := make([]error, 0) + x := &NamedAny{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewAny(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedHeader creates an object of type NamedHeader if possible, returning an error if not. +func NewNamedHeader(in *yaml.Node, context *compiler.Context) (*NamedHeader, error) { + errors := make([]error, 0) + x := &NamedHeader{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Header value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewHeader(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedParameter creates an object of type NamedParameter if possible, returning an error if not. 
+func NewNamedParameter(in *yaml.Node, context *compiler.Context) (*NamedParameter, error) { + errors := make([]error, 0) + x := &NamedParameter{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Parameter value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewParameter(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedPathItem creates an object of type NamedPathItem if possible, returning an error if not. +func NewNamedPathItem(in *yaml.Node, context *compiler.Context) (*NamedPathItem, error) { + errors := make([]error, 0) + x := &NamedPathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PathItem value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewPathItem(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedResponse creates an object of type NamedResponse if possible, returning an error if not. 
+func NewNamedResponse(in *yaml.Node, context *compiler.Context) (*NamedResponse, error) { + errors := make([]error, 0) + x := &NamedResponse{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Response value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewResponse(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedResponseValue creates an object of type NamedResponseValue if possible, returning an error if not. +func NewNamedResponseValue(in *yaml.Node, context *compiler.Context) (*NamedResponseValue, error) { + errors := make([]error, 0) + x := &NamedResponseValue{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ResponseValue value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewResponseValue(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSchema creates an object of type NamedSchema if possible, returning an error if not. 
+func NewNamedSchema(in *yaml.Node, context *compiler.Context) (*NamedSchema, error) { + errors := make([]error, 0) + x := &NamedSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Schema value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSchema(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedSecurityDefinitionsItem creates an object of type NamedSecurityDefinitionsItem if possible, returning an error if not. +func NewNamedSecurityDefinitionsItem(in *yaml.Node, context *compiler.Context) (*NamedSecurityDefinitionsItem, error) { + errors := make([]error, 0) + x := &NamedSecurityDefinitionsItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SecurityDefinitionsItem value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewSecurityDefinitionsItem(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedString creates an object of type NamedString if possible, returning an error if not. 
+func NewNamedString(in *yaml.Node, context *compiler.Context) (*NamedString, error) { + errors := make([]error, 0) + x := &NamedString{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + x.Value, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for value: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNamedStringArray creates an object of type NamedStringArray if possible, returning an error if not. +func NewNamedStringArray(in *yaml.Node, context *compiler.Context) (*NamedStringArray, error) { + errors := make([]error, 0) + x := &NamedStringArray{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"name", "value"} + var allowedPatterns []*regexp.Regexp + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // StringArray value = 2; + v2 := compiler.MapValueForKey(m, "value") + if v2 != nil { + var err error + x.Value, err = NewStringArray(v2, compiler.NewContext("value", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewNonBodyParameter creates an object of type NonBodyParameter if possible, returning an error if not. 
+func NewNonBodyParameter(in *yaml.Node, context *compiler.Context) (*NonBodyParameter, error) { + errors := make([]error, 0) + x := &NonBodyParameter{} + matched := false + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"in", "name", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // HeaderParameterSubSchema header_parameter_sub_schema = 1; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewHeaderParameterSubSchema(m, compiler.NewContext("headerParameterSubSchema", m, context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_HeaderParameterSubSchema{HeaderParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + // FormDataParameterSubSchema form_data_parameter_sub_schema = 2; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewFormDataParameterSubSchema(m, compiler.NewContext("formDataParameterSubSchema", m, context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_FormDataParameterSubSchema{FormDataParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + // QueryParameterSubSchema query_parameter_sub_schema = 3; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewQueryParameterSubSchema(m, compiler.NewContext("queryParameterSubSchema", m, context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_QueryParameterSubSchema{QueryParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + // PathParameterSubSchema path_parameter_sub_schema = 4; + { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewPathParameterSubSchema(m, compiler.NewContext("pathParameterSubSchema", m, context)) + if matchingError == nil { + x.Oneof = &NonBodyParameter_PathParameterSubSchema{PathParameterSubSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid NonBodyParameter") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2AccessCodeSecurity creates an object of type Oauth2AccessCodeSecurity if possible, returning an error if not. 
+func NewOauth2AccessCodeSecurity(in *yaml.Node, context *compiler.Context) (*Oauth2AccessCodeSecurity, error) { + errors := make([]error, 0) + x := &Oauth2AccessCodeSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"authorizationUrl", "flow", "tokenUrl", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"authorizationUrl", "description", "flow", "scopes", "tokenUrl", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [accessCode] + if ok && !compiler.StringArrayContainsValue([]string{"accessCode"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string authorization_url = 4; + v4 := compiler.MapValueForKey(m, "authorizationUrl") + if v4 != nil { + x.AuthorizationUrl, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for authorizationUrl: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string token_url = 5; + v5 := compiler.MapValueForKey(m, "tokenUrl") + if v5 != nil { + x.TokenUrl, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 6; + v6 := compiler.MapValueForKey(m, "description") + if v6 != nil { + x.Description, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // 
repeated NamedAny vendor_extension = 7; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2ApplicationSecurity creates an object of type Oauth2ApplicationSecurity if possible, returning an error if not. +func NewOauth2ApplicationSecurity(in *yaml.Node, context *compiler.Context) (*Oauth2ApplicationSecurity, error) { + errors := make([]error, 0) + x := &Oauth2ApplicationSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"flow", "tokenUrl", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "flow", "scopes", "tokenUrl", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [application] + if ok && !compiler.StringArrayContainsValue([]string{"application"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string token_url = 4; + v4 := 
compiler.MapValueForKey(m, "tokenUrl") + if v4 != nil { + x.TokenUrl, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2ImplicitSecurity creates an object of type Oauth2ImplicitSecurity if possible, returning an error if not. +func NewOauth2ImplicitSecurity(in *yaml.Node, context *compiler.Context) (*Oauth2ImplicitSecurity, error) { + errors := make([]error, 0) + x := &Oauth2ImplicitSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"authorizationUrl", "flow", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"authorizationUrl", "description", "flow", "scopes", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, 
compiler.NewError(context, message)) + } + // check for valid enum values + // [implicit] + if ok && !compiler.StringArrayContainsValue([]string{"implicit"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string authorization_url = 4; + v4 := compiler.MapValueForKey(m, "authorizationUrl") + if v4 != nil { + x.AuthorizationUrl, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for authorizationUrl: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2PasswordSecurity creates an object of type Oauth2PasswordSecurity if possible, returning an error if not. 
+func NewOauth2PasswordSecurity(in *yaml.Node, context *compiler.Context) (*Oauth2PasswordSecurity, error) { + errors := make([]error, 0) + x := &Oauth2PasswordSecurity{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"flow", "tokenUrl", "type"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "flow", "scopes", "tokenUrl", "type"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [oauth2] + if ok && !compiler.StringArrayContainsValue([]string{"oauth2"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string flow = 2; + v2 := compiler.MapValueForKey(m, "flow") + if v2 != nil { + x.Flow, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [password] + if ok && !compiler.StringArrayContainsValue([]string{"password"}, x.Flow) { + message := fmt.Sprintf("has unexpected value for flow: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Oauth2Scopes scopes = 3; + v3 := compiler.MapValueForKey(m, "scopes") + if v3 != nil { + var err error + x.Scopes, err = NewOauth2Scopes(v3, compiler.NewContext("scopes", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string token_url = 4; + v4 := compiler.MapValueForKey(m, "tokenUrl") + if v4 != nil { + x.TokenUrl, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for tokenUrl: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 5; + v5 := compiler.MapValueForKey(m, "description") + if v5 != nil { + x.Description, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if 
handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOauth2Scopes creates an object of type Oauth2Scopes if possible, returning an error if not. +func NewOauth2Scopes(in *yaml.Node, context *compiler.Context) (*Oauth2Scopes, error) { + errors := make([]error, 0) + x := &Oauth2Scopes{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedString additional_properties = 1; + // MAP: string + x.AdditionalProperties = make([]*NamedString, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedString{} + pair.Name = k + pair.Value, _ = compiler.StringForScalarNode(v) + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewOperation creates an object of type Operation if possible, returning an error if not. +func NewOperation(in *yaml.Node, context *compiler.Context) (*Operation, error) { + errors := make([]error, 0) + x := &Operation{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"responses"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"consumes", "deprecated", "description", "externalDocs", "operationId", "parameters", "produces", "responses", "schemes", "security", "summary", "tags"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated string tags = 1; + v1 := compiler.MapValueForKey(m, "tags") + if v1 != nil { + v, ok := compiler.SequenceNodeForNode(v1) + if ok { + x.Tags = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for tags: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string summary = 2; + v2 := compiler.MapValueForKey(m, "summary") + if v2 != nil { + x.Summary, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for summary: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, 
compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 4; + v4 := compiler.MapValueForKey(m, "externalDocs") + if v4 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v4, compiler.NewContext("externalDocs", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // string operation_id = 5; + v5 := compiler.MapValueForKey(m, "operationId") + if v5 != nil { + x.OperationId, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for operationId: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string produces = 6; + v6 := compiler.MapValueForKey(m, "produces") + if v6 != nil { + v, ok := compiler.SequenceNodeForNode(v6) + if ok { + x.Produces = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for produces: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string consumes = 7; + v7 := compiler.MapValueForKey(m, "consumes") + if v7 != nil { + v, ok := compiler.SequenceNodeForNode(v7) + if ok { + x.Consumes = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for consumes: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated ParametersItem parameters = 8; + v8 := compiler.MapValueForKey(m, "parameters") + if v8 != nil { + // repeated ParametersItem + x.Parameters = make([]*ParametersItem, 0) + a, ok := compiler.SequenceNodeForNode(v8) + if ok { + for _, item := range a.Content { + y, err := NewParametersItem(item, compiler.NewContext("parameters", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // Responses responses = 9; + v9 := compiler.MapValueForKey(m, "responses") + if v9 != nil { + var err error + x.Responses, err = NewResponses(v9, compiler.NewContext("responses", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated string schemes = 10; + v10 := compiler.MapValueForKey(m, "schemes") + if v10 != nil { + v, ok := compiler.SequenceNodeForNode(v10) + if ok { + x.Schemes = compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for schemes: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [http https ws wss] + if ok && !compiler.StringArrayContainsValues([]string{"http", "https", "ws", "wss"}, x.Schemes) { + message := fmt.Sprintf("has unexpected value for schemes: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool deprecated = 11; + v11 := compiler.MapValueForKey(m, "deprecated") + if v11 != nil { + x.Deprecated, ok = compiler.BoolForScalarNode(v11) + if !ok { + message := fmt.Sprintf("has unexpected value for deprecated: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated SecurityRequirement security = 12; + v12 := compiler.MapValueForKey(m, "security") + if v12 != nil { + // repeated SecurityRequirement + x.Security = make([]*SecurityRequirement, 0) + a, ok := compiler.SequenceNodeForNode(v12) + if ok { + for _, item := range a.Content { + y, err := NewSecurityRequirement(item, compiler.NewContext("security", item, context)) + if err != nil { + errors = append(errors, err) + } 
+ x.Security = append(x.Security, y) + } + } + } + // repeated NamedAny vendor_extension = 13; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameter creates an object of type Parameter if possible, returning an error if not. +func NewParameter(in *yaml.Node, context *compiler.Context) (*Parameter, error) { + errors := make([]error, 0) + x := &Parameter{} + matched := false + // BodyParameter body_parameter = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewBodyParameter(m, compiler.NewContext("bodyParameter", m, context)) + if matchingError == nil { + x.Oneof = &Parameter_BodyParameter{BodyParameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // NonBodyParameter non_body_parameter = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewNonBodyParameter(m, compiler.NewContext("nonBodyParameter", m, context)) + if matchingError == nil { + x.Oneof = &Parameter_NonBodyParameter{NonBodyParameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid Parameter") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParameterDefinitions creates an object of type ParameterDefinitions if possible, returning an error if not. +func NewParameterDefinitions(in *yaml.Node, context *compiler.Context) (*ParameterDefinitions, error) { + errors := make([]error, 0) + x := &ParameterDefinitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedParameter additional_properties = 1; + // MAP: Parameter + x.AdditionalProperties = make([]*NamedParameter, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedParameter{} + pair.Name = k + var err error + pair.Value, err = NewParameter(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewParametersItem creates an object of type ParametersItem if possible, returning an error if not. 
+func NewParametersItem(in *yaml.Node, context *compiler.Context) (*ParametersItem, error) { + errors := make([]error, 0) + x := &ParametersItem{} + matched := false + // Parameter parameter = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewParameter(m, compiler.NewContext("parameter", m, context)) + if matchingError == nil { + x.Oneof = &ParametersItem_Parameter{Parameter: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // JsonReference json_reference = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewJsonReference(m, compiler.NewContext("jsonReference", m, context)) + if matchingError == nil { + x.Oneof = &ParametersItem_JsonReference{JsonReference: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid ParametersItem") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPathItem creates an object of type PathItem if possible, returning an error if not. +func NewPathItem(in *yaml.Node, context *compiler.Context) (*PathItem, error) { + errors := make([]error, 0) + x := &PathItem{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "delete", "get", "head", "options", "parameters", "patch", "post", "put"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Operation get = 2; + v2 := compiler.MapValueForKey(m, "get") + if v2 != nil { + var err error + x.Get, err = NewOperation(v2, compiler.NewContext("get", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation put = 3; + v3 := compiler.MapValueForKey(m, "put") + if v3 != nil { + var err error + x.Put, err = NewOperation(v3, compiler.NewContext("put", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation post = 4; + v4 := compiler.MapValueForKey(m, "post") + if v4 != nil { + var err error + x.Post, err = NewOperation(v4, compiler.NewContext("post", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation delete = 5; + v5 := compiler.MapValueForKey(m, "delete") + if v5 != nil { + var err error + x.Delete, err = NewOperation(v5, compiler.NewContext("delete", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation options = 6; + v6 := compiler.MapValueForKey(m, "options") + if v6 != nil { + var err error + x.Options, err = NewOperation(v6, 
compiler.NewContext("options", v6, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation head = 7; + v7 := compiler.MapValueForKey(m, "head") + if v7 != nil { + var err error + x.Head, err = NewOperation(v7, compiler.NewContext("head", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // Operation patch = 8; + v8 := compiler.MapValueForKey(m, "patch") + if v8 != nil { + var err error + x.Patch, err = NewOperation(v8, compiler.NewContext("patch", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated ParametersItem parameters = 9; + v9 := compiler.MapValueForKey(m, "parameters") + if v9 != nil { + // repeated ParametersItem + x.Parameters = make([]*ParametersItem, 0) + a, ok := compiler.SequenceNodeForNode(v9) + if ok { + for _, item := range a.Content { + y, err := NewParametersItem(item, compiler.NewContext("parameters", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Parameters = append(x.Parameters, y) + } + } + } + // repeated NamedAny vendor_extension = 10; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPathParameterSubSchema creates an object of type PathParameterSubSchema if possible, returning an error if not. 
+func NewPathParameterSubSchema(in *yaml.Node, context *compiler.Context) (*PathParameterSubSchema, error) { + errors := make([]error, 0) + x := &PathParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"required"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = compiler.BoolForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [path] + if ok && !compiler.StringArrayContainsValue([]string{"path"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 5; + v5 := compiler.MapValueForKey(m, "type") + if v5 != nil { + x.Type, ok = compiler.StringForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 6; + v6 := compiler.MapValueForKey(m, "format") + if v6 != nil { + x.Format, ok = 
compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 7; + v7 := compiler.MapValueForKey(m, "items") + if v7 != nil { + var err error + x.Items, err = NewPrimitivesItems(v7, compiler.NewContext("items", v7, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 8; + v8 := compiler.MapValueForKey(m, "collectionFormat") + if v8 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v8) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 9; + v9 := compiler.MapValueForKey(m, "default") + if v9 != nil { + var err error + x.Default, err = NewAny(v9, compiler.NewContext("default", v9, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 10; + v10 := compiler.MapValueForKey(m, "maximum") + if v10 != nil { + v, ok := compiler.FloatForScalarNode(v10) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 11; + v11 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v11 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v11) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 12; + v12 := compiler.MapValueForKey(m, "minimum") + if v12 != nil { + v, ok := compiler.FloatForScalarNode(v12) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 13; + v13 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v13 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v13) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 14; + v14 := compiler.MapValueForKey(m, "maxLength") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 15; + v15 := compiler.MapValueForKey(m, "minLength") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 16; + v16 := compiler.MapValueForKey(m, "pattern") + if v16 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v16) + if !ok { + 
message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 17; + v17 := compiler.MapValueForKey(m, "maxItems") + if v17 != nil { + t, ok := compiler.IntForScalarNode(v17) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 18; + v18 := compiler.MapValueForKey(m, "minItems") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 19; + v19 := compiler.MapValueForKey(m, "uniqueItems") + if v19 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v19) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 20; + v20 := compiler.MapValueForKey(m, "enum") + if v20 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v20) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 21; + v21 := compiler.MapValueForKey(m, "multipleOf") + if v21 != nil { + v, ok := compiler.FloatForScalarNode(v21) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v21)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 22; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPaths creates an object of type Paths if possible, returning an error if not. 
+func NewPaths(in *yaml.Node, context *compiler.Context) (*Paths, error) { + errors := make([]error, 0) + x := &Paths{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{} + allowedPatterns := []*regexp.Regexp{pattern0, pattern1} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // repeated NamedAny vendor_extension = 1; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + // repeated NamedPathItem path = 2; + // MAP: PathItem ^/ + x.Path = make([]*NamedPathItem, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "/") { + pair := &NamedPathItem{} + pair.Name = k + var err error + pair.Value, err = NewPathItem(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.Path = append(x.Path, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewPrimitivesItems creates an object of type PrimitivesItems if possible, returning an error if not. 
+func NewPrimitivesItems(in *yaml.Node, context *compiler.Context) (*PrimitivesItems, error) { + errors := make([]error, 0) + x := &PrimitivesItems{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"collectionFormat", "default", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "pattern", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string type = 1; + v1 := compiler.MapValueForKey(m, "type") + if v1 != nil { + x.Type, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number integer boolean array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "integer", "boolean", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 2; + v2 := compiler.MapValueForKey(m, "format") + if v2 != nil { + x.Format, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 3; + v3 := compiler.MapValueForKey(m, "items") + if v3 != nil { + var err error + x.Items, err = NewPrimitivesItems(v3, compiler.NewContext("items", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 4; + v4 := compiler.MapValueForKey(m, "collectionFormat") + if v4 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + var err error + x.Default, err = NewAny(v5, compiler.NewContext("default", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 6; + v6 := compiler.MapValueForKey(m, "maximum") + if v6 != nil { + v, ok := compiler.FloatForScalarNode(v6) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 7; + v7 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v7 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has 
unexpected value for exclusiveMaximum: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 8; + v8 := compiler.MapValueForKey(m, "minimum") + if v8 != nil { + v, ok := compiler.FloatForScalarNode(v8) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 9; + v9 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v9 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 10; + v10 := compiler.MapValueForKey(m, "maxLength") + if v10 != nil { + t, ok := compiler.IntForScalarNode(v10) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 11; + v11 := compiler.MapValueForKey(m, "minLength") + if v11 != nil { + t, ok := compiler.IntForScalarNode(v11) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 12; + v12 := compiler.MapValueForKey(m, "pattern") + if v12 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v12) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 13; + v13 := compiler.MapValueForKey(m, "maxItems") + if v13 != nil { + t, ok := compiler.IntForScalarNode(v13) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 14; + v14 := compiler.MapValueForKey(m, "minItems") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 15; + v15 := compiler.MapValueForKey(m, "uniqueItems") + if v15 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v15) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 16; + v16 := compiler.MapValueForKey(m, "enum") + if v16 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v16) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 17; + v17 := compiler.MapValueForKey(m, "multipleOf") + if v17 != nil { + v, ok := compiler.FloatForScalarNode(v17) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // 
repeated NamedAny vendor_extension = 18; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewProperties creates an object of type Properties if possible, returning an error if not. +func NewProperties(in *yaml.Node, context *compiler.Context) (*Properties, error) { + errors := make([]error, 0) + x := &Properties{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSchema additional_properties = 1; + // MAP: Schema + x.AdditionalProperties = make([]*NamedSchema, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedSchema{} + pair.Name = k + var err error + pair.Value, err = NewSchema(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewQueryParameterSubSchema creates an object of type QueryParameterSubSchema if possible, returning an error if not. 
+func NewQueryParameterSubSchema(in *yaml.Node, context *compiler.Context) (*QueryParameterSubSchema, error) { + errors := make([]error, 0) + x := &QueryParameterSubSchema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"allowEmptyValue", "collectionFormat", "default", "description", "enum", "exclusiveMaximum", "exclusiveMinimum", "format", "in", "items", "maxItems", "maxLength", "maximum", "minItems", "minLength", "minimum", "multipleOf", "name", "pattern", "required", "type", "uniqueItems"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // bool required = 1; + v1 := compiler.MapValueForKey(m, "required") + if v1 != nil { + x.Required, ok = compiler.BoolForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string in = 2; + v2 := compiler.MapValueForKey(m, "in") + if v2 != nil { + x.In, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [query] + if ok && !compiler.StringArrayContainsValue([]string{"query"}, x.In) { + message := fmt.Sprintf("has unexpected value for in: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 3; + v3 := compiler.MapValueForKey(m, "description") + if v3 != nil { + x.Description, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string name = 4; + v4 := compiler.MapValueForKey(m, "name") + if v4 != nil { + x.Name, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool allow_empty_value = 5; + v5 := compiler.MapValueForKey(m, "allowEmptyValue") + if v5 != nil { + x.AllowEmptyValue, ok = compiler.BoolForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for allowEmptyValue: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string type = 6; + v6 := compiler.MapValueForKey(m, "type") + if v6 != nil { + x.Type, ok = compiler.StringForScalarNode(v6) + if !ok { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [string number boolean integer array] + if ok && !compiler.StringArrayContainsValue([]string{"string", "number", "boolean", "integer", "array"}, x.Type) { + message := fmt.Sprintf("has unexpected value for type: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 7; + v7 := compiler.MapValueForKey(m, "format") + if v7 != nil 
{ + x.Format, ok = compiler.StringForScalarNode(v7) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // PrimitivesItems items = 8; + v8 := compiler.MapValueForKey(m, "items") + if v8 != nil { + var err error + x.Items, err = NewPrimitivesItems(v8, compiler.NewContext("items", v8, context)) + if err != nil { + errors = append(errors, err) + } + } + // string collection_format = 9; + v9 := compiler.MapValueForKey(m, "collectionFormat") + if v9 != nil { + x.CollectionFormat, ok = compiler.StringForScalarNode(v9) + if !ok { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + // check for valid enum values + // [csv ssv tsv pipes multi] + if ok && !compiler.StringArrayContainsValue([]string{"csv", "ssv", "tsv", "pipes", "multi"}, x.CollectionFormat) { + message := fmt.Sprintf("has unexpected value for collectionFormat: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 10; + v10 := compiler.MapValueForKey(m, "default") + if v10 != nil { + var err error + x.Default, err = NewAny(v10, compiler.NewContext("default", v10, context)) + if err != nil { + errors = append(errors, err) + } + } + // float maximum = 11; + v11 := compiler.MapValueForKey(m, "maximum") + if v11 != nil { + v, ok := compiler.FloatForScalarNode(v11) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 12; + v12 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v12 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v12) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 13; + v13 := compiler.MapValueForKey(m, "minimum") + if v13 != nil { + v, ok := compiler.FloatForScalarNode(v13) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 14; + v14 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v14 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v14) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 15; + v15 := compiler.MapValueForKey(m, "maxLength") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 16; + v16 := compiler.MapValueForKey(m, "minLength") + if v16 != nil { + t, ok := compiler.IntForScalarNode(v16) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 17; + v17 := compiler.MapValueForKey(m, "pattern") + if v17 != nil { + x.Pattern, ok = 
compiler.StringForScalarNode(v17) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 18; + v18 := compiler.MapValueForKey(m, "maxItems") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 19; + v19 := compiler.MapValueForKey(m, "minItems") + if v19 != nil { + t, ok := compiler.IntForScalarNode(v19) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 20; + v20 := compiler.MapValueForKey(m, "uniqueItems") + if v20 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v20) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v20)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 21; + v21 := compiler.MapValueForKey(m, "enum") + if v21 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v21) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // float multiple_of = 22; + v22 := compiler.MapValueForKey(m, "multipleOf") + if v22 != nil { + v, ok := compiler.FloatForScalarNode(v22) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v22)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 23; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponse creates an object of type Response if possible, returning an error if not. 
+func NewResponse(in *yaml.Node, context *compiler.Context) (*Response, error) { + errors := make([]error, 0) + x := &Response{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"description"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "examples", "headers", "schema"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string description = 1; + v1 := compiler.MapValueForKey(m, "description") + if v1 != nil { + x.Description, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // SchemaItem schema = 2; + v2 := compiler.MapValueForKey(m, "schema") + if v2 != nil { + var err error + x.Schema, err = NewSchemaItem(v2, compiler.NewContext("schema", v2, context)) + if err != nil { + errors = append(errors, err) + } + } + // Headers headers = 3; + v3 := compiler.MapValueForKey(m, "headers") + if v3 != nil { + var err error + x.Headers, err = NewHeaders(v3, compiler.NewContext("headers", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // Examples examples = 4; + v4 := compiler.MapValueForKey(m, "examples") + if v4 != nil { + var err error + x.Examples, err = NewExamples(v4, compiler.NewContext("examples", v4, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 5; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewResponseDefinitions creates an object of type ResponseDefinitions if possible, returning an error if not. 
+func NewResponseDefinitions(in *yaml.Node, context *compiler.Context) (*ResponseDefinitions, error) {
+	errors := make([]error, 0)
+	x := &ResponseDefinitions{}
+	m, ok := compiler.UnpackMap(in)
+	if !ok {
+		message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in)
+		errors = append(errors, compiler.NewError(context, message))
+	} else {
+		// repeated NamedResponse additional_properties = 1;
+		// MAP: Response
+		x.AdditionalProperties = make([]*NamedResponse, 0)
+		for i := 0; i < len(m.Content); i += 2 {
+			k, ok := compiler.StringForScalarNode(m.Content[i])
+			if ok {
+				v := m.Content[i+1]
+				pair := &NamedResponse{}
+				pair.Name = k
+				var err error
+				pair.Value, err = NewResponse(v, compiler.NewContext(k, v, context))
+				if err != nil {
+					errors = append(errors, err)
+				}
+				x.AdditionalProperties = append(x.AdditionalProperties, pair)
+			}
+		}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// NewResponseValue creates an object of type ResponseValue if possible, returning an error if not.
+func NewResponseValue(in *yaml.Node, context *compiler.Context) (*ResponseValue, error) {
+	errors := make([]error, 0)
+	x := &ResponseValue{}
+	matched := false
+	// Response response = 1;
+	{
+		m, ok := compiler.UnpackMap(in)
+		if ok {
+			// errors might be ok here, they mean we just don't have the right subtype
+			t, matchingError := NewResponse(m, compiler.NewContext("response", m, context))
+			if matchingError == nil {
+				x.Oneof = &ResponseValue_Response{Response: t}
+				matched = true
+			} else {
+				errors = append(errors, matchingError)
+			}
+		}
+	}
+	// JsonReference json_reference = 2;
+	{
+		m, ok := compiler.UnpackMap(in)
+		if ok {
+			// errors might be ok here, they mean we just don't have the right subtype
+			t, matchingError := NewJsonReference(m, compiler.NewContext("jsonReference", m, context))
+			if matchingError == nil {
+				x.Oneof = &ResponseValue_JsonReference{JsonReference: t}
+				matched = true
+			} else {
+				errors = append(errors, matchingError)
+			}
+		}
+	}
+	if matched {
+		// since the oneof matched one of its possibilities, discard any matching errors
+		errors = make([]error, 0)
+	} else {
+		message := fmt.Sprintf("contains an invalid ResponseValue")
+		err := compiler.NewError(context, message)
+		errors = []error{err}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// NewResponses creates an object of type Responses if possible, returning an error if not.
+func NewResponses(in *yaml.Node, context *compiler.Context) (*Responses, error) {
+	errors := make([]error, 0)
+	x := &Responses{}
+	m, ok := compiler.UnpackMap(in)
+	if !ok {
+		message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in)
+		errors = append(errors, compiler.NewError(context, message))
+	} else {
+		allowedKeys := []string{}
+		allowedPatterns := []*regexp.Regexp{pattern2, pattern0}
+		invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns)
+		if len(invalidKeys) > 0 {
+			message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", "))
+			errors = append(errors, compiler.NewError(context, message))
+		}
+		// repeated NamedResponseValue response_code = 1;
+		// MAP: ResponseValue ^([0-9]{3})$|^(default)$
+		x.ResponseCode = make([]*NamedResponseValue, 0)
+		for i := 0; i < len(m.Content); i += 2 {
+			k, ok := compiler.StringForScalarNode(m.Content[i])
+			if ok {
+				v := m.Content[i+1]
+				if pattern2.MatchString(k) {
+					pair := &NamedResponseValue{}
+					pair.Name = k
+					var err error
+					pair.Value, err = NewResponseValue(v, compiler.NewContext(k, v, context))
+					if err != nil {
+						errors = append(errors, err)
+					}
+					x.ResponseCode = append(x.ResponseCode, pair)
+				}
+			}
+		}
+		// repeated NamedAny vendor_extension = 2;
+		// MAP: Any ^x-
+		x.VendorExtension = make([]*NamedAny, 0)
+		for i := 0; i < len(m.Content); i += 2 {
+			k, ok := compiler.StringForScalarNode(m.Content[i])
+			if ok {
+				v := m.Content[i+1]
+				if strings.HasPrefix(k, "x-") {
+					pair := &NamedAny{}
+					pair.Name = k
+					result := &Any{}
+					handled, resultFromExt, err := compiler.CallExtension(context, v, k)
+					if handled {
+						if err != nil {
+							errors = append(errors, err)
+						} else {
+							bytes := compiler.Marshal(v)
+							result.Yaml = string(bytes)
+							result.Value = resultFromExt
+							pair.Value = result
+						}
+					} else {
+						pair.Value, err = NewAny(v, compiler.NewContext(k, v, context))
+						if err != nil {
+							errors = append(errors, err)
+						}
+					}
+					x.VendorExtension = append(x.VendorExtension, pair)
+				}
+			}
+		}
+	}
+	return x, compiler.NewErrorGroupOrNil(errors)
+}
+
+// NewSchema creates an object of type Schema if possible, returning an error if not.
+func NewSchema(in *yaml.Node, context *compiler.Context) (*Schema, error) { + errors := make([]error, 0) + x := &Schema{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"$ref", "additionalProperties", "allOf", "default", "description", "discriminator", "enum", "example", "exclusiveMaximum", "exclusiveMinimum", "externalDocs", "format", "items", "maxItems", "maxLength", "maxProperties", "maximum", "minItems", "minLength", "minProperties", "minimum", "multipleOf", "pattern", "properties", "readOnly", "required", "title", "type", "uniqueItems", "xml"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string _ref = 1; + v1 := compiler.MapValueForKey(m, "$ref") + if v1 != nil { + x.XRef, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for $ref: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string format = 2; + v2 := compiler.MapValueForKey(m, "format") + if v2 != nil { + x.Format, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for format: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string title = 3; + v3 := compiler.MapValueForKey(m, "title") + if v3 != nil { + x.Title, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for title: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 4; + v4 := compiler.MapValueForKey(m, "description") + if v4 != nil { + x.Description, ok = compiler.StringForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Any default = 5; + v5 := compiler.MapValueForKey(m, "default") + if v5 != nil { + var err error + x.Default, err = NewAny(v5, compiler.NewContext("default", v5, context)) + if err != nil { + errors = append(errors, err) + } + } + // float multiple_of = 6; + v6 := compiler.MapValueForKey(m, "multipleOf") + if v6 != nil { + v, ok := compiler.FloatForScalarNode(v6) + if ok { + x.MultipleOf = v + } else { + message := fmt.Sprintf("has unexpected value for multipleOf: %s", compiler.Display(v6)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float maximum = 7; + v7 := compiler.MapValueForKey(m, "maximum") + if v7 != nil { + v, ok := compiler.FloatForScalarNode(v7) + if ok { + x.Maximum = v + } else { + message := fmt.Sprintf("has unexpected value for maximum: %s", compiler.Display(v7)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_maximum = 8; + v8 := compiler.MapValueForKey(m, "exclusiveMaximum") + if v8 != nil { + x.ExclusiveMaximum, ok = compiler.BoolForScalarNode(v8) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMaximum: %s", compiler.Display(v8)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // float minimum = 9; + 
v9 := compiler.MapValueForKey(m, "minimum") + if v9 != nil { + v, ok := compiler.FloatForScalarNode(v9) + if ok { + x.Minimum = v + } else { + message := fmt.Sprintf("has unexpected value for minimum: %s", compiler.Display(v9)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool exclusive_minimum = 10; + v10 := compiler.MapValueForKey(m, "exclusiveMinimum") + if v10 != nil { + x.ExclusiveMinimum, ok = compiler.BoolForScalarNode(v10) + if !ok { + message := fmt.Sprintf("has unexpected value for exclusiveMinimum: %s", compiler.Display(v10)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_length = 11; + v11 := compiler.MapValueForKey(m, "maxLength") + if v11 != nil { + t, ok := compiler.IntForScalarNode(v11) + if ok { + x.MaxLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxLength: %s", compiler.Display(v11)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_length = 12; + v12 := compiler.MapValueForKey(m, "minLength") + if v12 != nil { + t, ok := compiler.IntForScalarNode(v12) + if ok { + x.MinLength = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minLength: %s", compiler.Display(v12)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string pattern = 13; + v13 := compiler.MapValueForKey(m, "pattern") + if v13 != nil { + x.Pattern, ok = compiler.StringForScalarNode(v13) + if !ok { + message := fmt.Sprintf("has unexpected value for pattern: %s", compiler.Display(v13)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_items = 14; + v14 := compiler.MapValueForKey(m, "maxItems") + if v14 != nil { + t, ok := compiler.IntForScalarNode(v14) + if ok { + x.MaxItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxItems: %s", compiler.Display(v14)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_items = 15; + v15 := compiler.MapValueForKey(m, "minItems") + if v15 != nil { + t, ok := compiler.IntForScalarNode(v15) + if ok { + x.MinItems = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minItems: %s", compiler.Display(v15)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool unique_items = 16; + v16 := compiler.MapValueForKey(m, "uniqueItems") + if v16 != nil { + x.UniqueItems, ok = compiler.BoolForScalarNode(v16) + if !ok { + message := fmt.Sprintf("has unexpected value for uniqueItems: %s", compiler.Display(v16)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 max_properties = 17; + v17 := compiler.MapValueForKey(m, "maxProperties") + if v17 != nil { + t, ok := compiler.IntForScalarNode(v17) + if ok { + x.MaxProperties = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for maxProperties: %s", compiler.Display(v17)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // int64 min_properties = 18; + v18 := compiler.MapValueForKey(m, "minProperties") + if v18 != nil { + t, ok := compiler.IntForScalarNode(v18) + if ok { + x.MinProperties = int64(t) + } else { + message := fmt.Sprintf("has unexpected value for minProperties: %s", compiler.Display(v18)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated string required = 19; + v19 := compiler.MapValueForKey(m, "required") + if v19 != nil { + v, ok := compiler.SequenceNodeForNode(v19) + if ok { + x.Required = 
compiler.StringArrayForSequenceNode(v) + } else { + message := fmt.Sprintf("has unexpected value for required: %s", compiler.Display(v19)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated Any enum = 20; + v20 := compiler.MapValueForKey(m, "enum") + if v20 != nil { + // repeated Any + x.Enum = make([]*Any, 0) + a, ok := compiler.SequenceNodeForNode(v20) + if ok { + for _, item := range a.Content { + y, err := NewAny(item, compiler.NewContext("enum", item, context)) + if err != nil { + errors = append(errors, err) + } + x.Enum = append(x.Enum, y) + } + } + } + // AdditionalPropertiesItem additional_properties = 21; + v21 := compiler.MapValueForKey(m, "additionalProperties") + if v21 != nil { + var err error + x.AdditionalProperties, err = NewAdditionalPropertiesItem(v21, compiler.NewContext("additionalProperties", v21, context)) + if err != nil { + errors = append(errors, err) + } + } + // TypeItem type = 22; + v22 := compiler.MapValueForKey(m, "type") + if v22 != nil { + var err error + x.Type, err = NewTypeItem(v22, compiler.NewContext("type", v22, context)) + if err != nil { + errors = append(errors, err) + } + } + // ItemsItem items = 23; + v23 := compiler.MapValueForKey(m, "items") + if v23 != nil { + var err error + x.Items, err = NewItemsItem(v23, compiler.NewContext("items", v23, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated Schema all_of = 24; + v24 := compiler.MapValueForKey(m, "allOf") + if v24 != nil { + // repeated Schema + x.AllOf = make([]*Schema, 0) + a, ok := compiler.SequenceNodeForNode(v24) + if ok { + for _, item := range a.Content { + y, err := NewSchema(item, compiler.NewContext("allOf", item, context)) + if err != nil { + errors = append(errors, err) + } + x.AllOf = append(x.AllOf, y) + } + } + } + // Properties properties = 25; + v25 := compiler.MapValueForKey(m, "properties") + if v25 != nil { + var err error + x.Properties, err = NewProperties(v25, compiler.NewContext("properties", v25, context)) + if err != nil { + errors = append(errors, err) + } + } + // string discriminator = 26; + v26 := compiler.MapValueForKey(m, "discriminator") + if v26 != nil { + x.Discriminator, ok = compiler.StringForScalarNode(v26) + if !ok { + message := fmt.Sprintf("has unexpected value for discriminator: %s", compiler.Display(v26)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool read_only = 27; + v27 := compiler.MapValueForKey(m, "readOnly") + if v27 != nil { + x.ReadOnly, ok = compiler.BoolForScalarNode(v27) + if !ok { + message := fmt.Sprintf("has unexpected value for readOnly: %s", compiler.Display(v27)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // Xml xml = 28; + v28 := compiler.MapValueForKey(m, "xml") + if v28 != nil { + var err error + x.Xml, err = NewXml(v28, compiler.NewContext("xml", v28, context)) + if err != nil { + errors = append(errors, err) + } + } + // ExternalDocs external_docs = 29; + v29 := compiler.MapValueForKey(m, "externalDocs") + if v29 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v29, compiler.NewContext("externalDocs", v29, context)) + if err != nil { + errors = append(errors, err) + } + } + // Any example = 30; + v30 := compiler.MapValueForKey(m, "example") + if v30 != nil { + var err error + x.Example, err = NewAny(v30, compiler.NewContext("example", v30, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 31; + // MAP: Any ^x- + x.VendorExtension 
= make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSchemaItem creates an object of type SchemaItem if possible, returning an error if not. +func NewSchemaItem(in *yaml.Node, context *compiler.Context) (*SchemaItem, error) { + errors := make([]error, 0) + x := &SchemaItem{} + matched := false + // Schema schema = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewSchema(m, compiler.NewContext("schema", m, context)) + if matchingError == nil { + x.Oneof = &SchemaItem_Schema{Schema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // FileSchema file_schema = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewFileSchema(m, compiler.NewContext("fileSchema", m, context)) + if matchingError == nil { + x.Oneof = &SchemaItem_FileSchema{FileSchema: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid SchemaItem") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityDefinitions creates an object of type SecurityDefinitions if possible, returning an error if not. +func NewSecurityDefinitions(in *yaml.Node, context *compiler.Context) (*SecurityDefinitions, error) { + errors := make([]error, 0) + x := &SecurityDefinitions{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedSecurityDefinitionsItem additional_properties = 1; + // MAP: SecurityDefinitionsItem + x.AdditionalProperties = make([]*NamedSecurityDefinitionsItem, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedSecurityDefinitionsItem{} + pair.Name = k + var err error + pair.Value, err = NewSecurityDefinitionsItem(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityDefinitionsItem creates an object of type SecurityDefinitionsItem if possible, returning an error if not. 
+func NewSecurityDefinitionsItem(in *yaml.Node, context *compiler.Context) (*SecurityDefinitionsItem, error) { + errors := make([]error, 0) + x := &SecurityDefinitionsItem{} + matched := false + // BasicAuthenticationSecurity basic_authentication_security = 1; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewBasicAuthenticationSecurity(m, compiler.NewContext("basicAuthenticationSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_BasicAuthenticationSecurity{BasicAuthenticationSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // ApiKeySecurity api_key_security = 2; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewApiKeySecurity(m, compiler.NewContext("apiKeySecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_ApiKeySecurity{ApiKeySecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2ImplicitSecurity oauth2_implicit_security = 3; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2ImplicitSecurity(m, compiler.NewContext("oauth2ImplicitSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2ImplicitSecurity{Oauth2ImplicitSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2PasswordSecurity oauth2_password_security = 4; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2PasswordSecurity(m, compiler.NewContext("oauth2PasswordSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2PasswordSecurity{Oauth2PasswordSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2ApplicationSecurity oauth2_application_security = 5; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2ApplicationSecurity(m, compiler.NewContext("oauth2ApplicationSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2ApplicationSecurity{Oauth2ApplicationSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + // Oauth2AccessCodeSecurity oauth2_access_code_security = 6; + { + m, ok := compiler.UnpackMap(in) + if ok { + // errors might be ok here, they mean we just don't have the right subtype + t, matchingError := NewOauth2AccessCodeSecurity(m, compiler.NewContext("oauth2AccessCodeSecurity", m, context)) + if matchingError == nil { + x.Oneof = &SecurityDefinitionsItem_Oauth2AccessCodeSecurity{Oauth2AccessCodeSecurity: t} + matched = true + } else { + errors = append(errors, matchingError) + } + } + } + if matched { + // since the oneof matched one of its possibilities, discard any matching errors + errors = make([]error, 0) + } else { + message := fmt.Sprintf("contains an invalid SecurityDefinitionsItem") + err := compiler.NewError(context, message) + errors = []error{err} + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewSecurityRequirement creates an object of type SecurityRequirement if 
possible, returning an error if not. +func NewSecurityRequirement(in *yaml.Node, context *compiler.Context) (*SecurityRequirement, error) { + errors := make([]error, 0) + x := &SecurityRequirement{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedStringArray additional_properties = 1; + // MAP: StringArray + x.AdditionalProperties = make([]*NamedStringArray, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedStringArray{} + pair.Name = k + var err error + pair.Value, err = NewStringArray(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewStringArray creates an object of type StringArray if possible, returning an error if not. +func NewStringArray(in *yaml.Node, context *compiler.Context) (*StringArray, error) { + errors := make([]error, 0) + x := &StringArray{} + x.Value = make([]string, 0) + for _, node := range in.Content { + s, _ := compiler.StringForScalarNode(node) + x.Value = append(x.Value, s) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewTag creates an object of type Tag if possible, returning an error if not. +func NewTag(in *yaml.Node, context *compiler.Context) (*Tag, error) { + errors := make([]error, 0) + x := &Tag{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + requiredKeys := []string{"name"} + missingKeys := compiler.MissingKeysInMap(m, requiredKeys) + if len(missingKeys) > 0 { + message := fmt.Sprintf("is missing required %s: %+v", compiler.PluralProperties(len(missingKeys)), strings.Join(missingKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + allowedKeys := []string{"description", "externalDocs", "name"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string description = 2; + v2 := compiler.MapValueForKey(m, "description") + if v2 != nil { + x.Description, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for description: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // ExternalDocs external_docs = 3; + v3 := compiler.MapValueForKey(m, "externalDocs") + if v3 != nil { + var err error + x.ExternalDocs, err = NewExternalDocs(v3, compiler.NewContext("externalDocs", v3, context)) + if err != nil { + errors = append(errors, err) + } + } + // repeated NamedAny vendor_extension = 4; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 
2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewTypeItem creates an object of type TypeItem if possible, returning an error if not. +func NewTypeItem(in *yaml.Node, context *compiler.Context) (*TypeItem, error) { + errors := make([]error, 0) + x := &TypeItem{} + v1 := in + switch v1.Kind { + case yaml.ScalarNode: + x.Value = make([]string, 0) + x.Value = append(x.Value, v1.Value) + case yaml.SequenceNode: + x.Value = make([]string, 0) + for _, v := range v1.Content { + value := v.Value + ok := v.Kind == yaml.ScalarNode + if ok { + x.Value = append(x.Value, value) + } else { + message := fmt.Sprintf("has unexpected value for string array element: %+v (%T)", value, value) + errors = append(errors, compiler.NewError(context, message)) + } + } + default: + message := fmt.Sprintf("has unexpected value for string array: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewVendorExtension creates an object of type VendorExtension if possible, returning an error if not. +func NewVendorExtension(in *yaml.Node, context *compiler.Context) (*VendorExtension, error) { + errors := make([]error, 0) + x := &VendorExtension{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + // repeated NamedAny additional_properties = 1; + // MAP: Any + x.AdditionalProperties = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.AdditionalProperties = append(x.AdditionalProperties, pair) + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// NewXml creates an object of type Xml if possible, returning an error if not. 
+func NewXml(in *yaml.Node, context *compiler.Context) (*Xml, error) { + errors := make([]error, 0) + x := &Xml{} + m, ok := compiler.UnpackMap(in) + if !ok { + message := fmt.Sprintf("has unexpected value: %+v (%T)", in, in) + errors = append(errors, compiler.NewError(context, message)) + } else { + allowedKeys := []string{"attribute", "name", "namespace", "prefix", "wrapped"} + allowedPatterns := []*regexp.Regexp{pattern0} + invalidKeys := compiler.InvalidKeysInMap(m, allowedKeys, allowedPatterns) + if len(invalidKeys) > 0 { + message := fmt.Sprintf("has invalid %s: %+v", compiler.PluralProperties(len(invalidKeys)), strings.Join(invalidKeys, ", ")) + errors = append(errors, compiler.NewError(context, message)) + } + // string name = 1; + v1 := compiler.MapValueForKey(m, "name") + if v1 != nil { + x.Name, ok = compiler.StringForScalarNode(v1) + if !ok { + message := fmt.Sprintf("has unexpected value for name: %s", compiler.Display(v1)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string namespace = 2; + v2 := compiler.MapValueForKey(m, "namespace") + if v2 != nil { + x.Namespace, ok = compiler.StringForScalarNode(v2) + if !ok { + message := fmt.Sprintf("has unexpected value for namespace: %s", compiler.Display(v2)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // string prefix = 3; + v3 := compiler.MapValueForKey(m, "prefix") + if v3 != nil { + x.Prefix, ok = compiler.StringForScalarNode(v3) + if !ok { + message := fmt.Sprintf("has unexpected value for prefix: %s", compiler.Display(v3)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool attribute = 4; + v4 := compiler.MapValueForKey(m, "attribute") + if v4 != nil { + x.Attribute, ok = compiler.BoolForScalarNode(v4) + if !ok { + message := fmt.Sprintf("has unexpected value for attribute: %s", compiler.Display(v4)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // bool wrapped = 5; + v5 := compiler.MapValueForKey(m, "wrapped") + if v5 != nil { + x.Wrapped, ok = compiler.BoolForScalarNode(v5) + if !ok { + message := fmt.Sprintf("has unexpected value for wrapped: %s", compiler.Display(v5)) + errors = append(errors, compiler.NewError(context, message)) + } + } + // repeated NamedAny vendor_extension = 6; + // MAP: Any ^x- + x.VendorExtension = make([]*NamedAny, 0) + for i := 0; i < len(m.Content); i += 2 { + k, ok := compiler.StringForScalarNode(m.Content[i]) + if ok { + v := m.Content[i+1] + if strings.HasPrefix(k, "x-") { + pair := &NamedAny{} + pair.Name = k + result := &Any{} + handled, resultFromExt, err := compiler.CallExtension(context, v, k) + if handled { + if err != nil { + errors = append(errors, err) + } else { + bytes := compiler.Marshal(v) + result.Yaml = string(bytes) + result.Value = resultFromExt + pair.Value = result + } + } else { + pair.Value, err = NewAny(v, compiler.NewContext(k, v, context)) + if err != nil { + errors = append(errors, err) + } + } + x.VendorExtension = append(x.VendorExtension, pair) + } + } + } + } + return x, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside AdditionalPropertiesItem objects. 
+func (m *AdditionalPropertiesItem) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	{
+		p, ok := m.Oneof.(*AdditionalPropertiesItem_Schema)
+		if ok {
+			_, err := p.Schema.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Any objects.
+func (m *Any) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside ApiKeySecurity objects.
+func (m *ApiKeySecurity) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.VendorExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside BasicAuthenticationSecurity objects.
+func (m *BasicAuthenticationSecurity) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.VendorExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside BodyParameter objects.
+func (m *BodyParameter) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Schema != nil {
+		_, err := m.Schema.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	for _, item := range m.VendorExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Contact objects.
+func (m *Contact) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.VendorExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Default objects.
+func (m *Default) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.AdditionalProperties {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Definitions objects.
+func (m *Definitions) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.AdditionalProperties {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Document objects.
+func (m *Document) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Info != nil { + _, err := m.Info.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Paths != nil { + _, err := m.Paths.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Definitions != nil { + _, err := m.Definitions.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Parameters != nil { + _, err := m.Parameters.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Responses != nil { + _, err := m.Responses.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.SecurityDefinitions != nil { + _, err := m.SecurityDefinitions.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Tags { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Examples objects. +func (m *Examples) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ExternalDocs objects. +func (m *ExternalDocs) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside FileSchema objects. +func (m *FileSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside FormDataParameterSubSchema objects. 
+func (m *FormDataParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Header objects. +func (m *Header) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside HeaderParameterSubSchema objects. +func (m *HeaderParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Headers objects. +func (m *Headers) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Info objects. +func (m *Info) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Contact != nil { + _, err := m.Contact.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.License != nil { + _, err := m.License.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ItemsItem objects. 
+func (m *ItemsItem) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.Schema {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside JsonReference objects.
+func (m *JsonReference) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.XRef != "" {
+		info, err := compiler.ReadInfoForRef(root, m.XRef)
+		if err != nil {
+			return nil, err
+		}
+		if info != nil {
+			replacement, err := NewJsonReference(info, nil)
+			if err == nil {
+				*m = *replacement
+				return m.ResolveReferences(root)
+			}
+		}
+		return info, nil
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside License objects.
+func (m *License) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	for _, item := range m.VendorExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedAny objects.
+func (m *NamedAny) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Value != nil {
+		_, err := m.Value.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedHeader objects.
+func (m *NamedHeader) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Value != nil {
+		_, err := m.Value.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedParameter objects.
+func (m *NamedParameter) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Value != nil {
+		_, err := m.Value.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedPathItem objects.
+func (m *NamedPathItem) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Value != nil {
+		_, err := m.Value.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedResponse objects.
+func (m *NamedResponse) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Value != nil {
+		_, err := m.Value.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedResponseValue objects.
+func (m *NamedResponseValue) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Value != nil {
+		_, err := m.Value.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedSchema objects.
+func (m *NamedSchema) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Value != nil {
+		_, err := m.Value.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedSecurityDefinitionsItem objects.
+func (m *NamedSecurityDefinitionsItem) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Value != nil {
+		_, err := m.Value.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedString objects.
+func (m *NamedString) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NamedStringArray objects.
+func (m *NamedStringArray) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Value != nil {
+		_, err := m.Value.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside NonBodyParameter objects.
+func (m *NonBodyParameter) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	{
+		p, ok := m.Oneof.(*NonBodyParameter_HeaderParameterSubSchema)
+		if ok {
+			_, err := p.HeaderParameterSubSchema.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	{
+		p, ok := m.Oneof.(*NonBodyParameter_FormDataParameterSubSchema)
+		if ok {
+			_, err := p.FormDataParameterSubSchema.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	{
+		p, ok := m.Oneof.(*NonBodyParameter_QueryParameterSubSchema)
+		if ok {
+			_, err := p.QueryParameterSubSchema.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	{
+		p, ok := m.Oneof.(*NonBodyParameter_PathParameterSubSchema)
+		if ok {
+			_, err := p.PathParameterSubSchema.ResolveReferences(root)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Oauth2AccessCodeSecurity objects.
+func (m *Oauth2AccessCodeSecurity) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Scopes != nil {
+		_, err := m.Scopes.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	for _, item := range m.VendorExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Oauth2ApplicationSecurity objects.
+func (m *Oauth2ApplicationSecurity) ResolveReferences(root string) (*yaml.Node, error) {
+	errors := make([]error, 0)
+	if m.Scopes != nil {
+		_, err := m.Scopes.ResolveReferences(root)
+		if err != nil {
+			errors = append(errors, err)
+		}
+	}
+	for _, item := range m.VendorExtension {
+		if item != nil {
+			_, err := item.ResolveReferences(root)
+			if err != nil {
+				errors = append(errors, err)
+			}
+		}
+	}
+	return nil, compiler.NewErrorGroupOrNil(errors)
+}
+
+// ResolveReferences resolves references found inside Oauth2ImplicitSecurity objects.
+func (m *Oauth2ImplicitSecurity) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2PasswordSecurity objects. +func (m *Oauth2PasswordSecurity) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Scopes != nil { + _, err := m.Scopes.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Oauth2Scopes objects. +func (m *Oauth2Scopes) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Operation objects. +func (m *Operation) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Responses != nil { + _, err := m.Responses.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Security { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Parameter objects. +func (m *Parameter) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*Parameter_BodyParameter) + if ok { + _, err := p.BodyParameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*Parameter_NonBodyParameter) + if ok { + _, err := p.NonBodyParameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParameterDefinitions objects. +func (m *ParameterDefinitions) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ParametersItem objects. 
+func (m *ParametersItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ParametersItem_Parameter) + if ok { + _, err := p.Parameter.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ParametersItem_JsonReference) + if ok { + info, err := p.JsonReference.ResolveReferences(root) + if err != nil { + return nil, err + } else if info != nil { + n, err := NewParametersItem(info, nil) + if err != nil { + return nil, err + } else if n != nil { + *m = *n + return nil, nil + } + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PathItem objects. +func (m *PathItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewPathItem(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Get != nil { + _, err := m.Get.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Put != nil { + _, err := m.Put.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Post != nil { + _, err := m.Post.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Delete != nil { + _, err := m.Delete.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Options != nil { + _, err := m.Options.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Head != nil { + _, err := m.Head.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Patch != nil { + _, err := m.Patch.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Parameters { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PathParameterSubSchema objects. +func (m *PathParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Paths objects. 
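Taken together, the generated ResolveReferences methods walk the whole model: leaf types return immediately, container types fan out to their children and collect failures, and $ref-bearing types (JsonReference, ParametersItem, PathItem, and later Schema and ResponseValue) replace themselves in place with the referenced content before recursing. A minimal caller-side sketch of driving this from the root Document follows; ParseDocument, the openapi_v2 import name, and the use of the source file name as the resolution root are assumptions about how the vendored package is normally used, not part of this diff.

package main

import (
	"fmt"
	"os"

	openapi_v2 "github.com/google/gnostic-models/openapiv2"
)

func main() {
	// Hypothetical input path; any Swagger 2.0 description would do.
	b, err := os.ReadFile("swagger.yaml")
	if err != nil {
		panic(err)
	}
	doc, err := openapi_v2.ParseDocument(b)
	if err != nil {
		panic(err)
	}
	// ResolveReferences mutates doc in place, replacing $ref nodes with the
	// content they point at; failures from every branch are aggregated by
	// compiler.NewErrorGroupOrNil and surfaced here as a single error.
	if _, err := doc.ResolveReferences("swagger.yaml"); err != nil {
		fmt.Fprintln(os.Stderr, "unresolved references:", err)
		os.Exit(1)
	}
	fmt.Println("all references resolved")
}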
+func (m *Paths) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.Path { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside PrimitivesItems objects. +func (m *PrimitivesItems) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Properties objects. +func (m *Properties) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside QueryParameterSubSchema objects. +func (m *QueryParameterSubSchema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Response objects. +func (m *Response) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.Schema != nil { + _, err := m.Schema.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Headers != nil { + _, err := m.Headers.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Examples != nil { + _, err := m.Examples.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponseDefinitions objects. 
+func (m *ResponseDefinitions) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside ResponseValue objects. +func (m *ResponseValue) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*ResponseValue_Response) + if ok { + _, err := p.Response.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*ResponseValue_JsonReference) + if ok { + info, err := p.JsonReference.ResolveReferences(root) + if err != nil { + return nil, err + } else if info != nil { + n, err := NewResponseValue(info, nil) + if err != nil { + return nil, err + } else if n != nil { + *m = *n + return nil, nil + } + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Responses objects. +func (m *Responses) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.ResponseCode { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Schema objects. +func (m *Schema) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.XRef != "" { + info, err := compiler.ReadInfoForRef(root, m.XRef) + if err != nil { + return nil, err + } + if info != nil { + replacement, err := NewSchema(info, nil) + if err == nil { + *m = *replacement + return m.ResolveReferences(root) + } + } + return info, nil + } + if m.Default != nil { + _, err := m.Default.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.Enum { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.AdditionalProperties != nil { + _, err := m.AdditionalProperties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Type != nil { + _, err := m.Type.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Items != nil { + _, err := m.Items.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.AllOf { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + if m.Properties != nil { + _, err := m.Properties.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Xml != nil { + _, err := m.Xml.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + if m.Example != nil { + _, err := m.Example.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = 
append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SchemaItem objects. +func (m *SchemaItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SchemaItem_Schema) + if ok { + _, err := p.Schema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SchemaItem_FileSchema) + if ok { + _, err := p.FileSchema.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityDefinitions objects. +func (m *SecurityDefinitions) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityDefinitionsItem objects. +func (m *SecurityDefinitionsItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_BasicAuthenticationSecurity) + if ok { + _, err := p.BasicAuthenticationSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_ApiKeySecurity) + if ok { + _, err := p.ApiKeySecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2ImplicitSecurity) + if ok { + _, err := p.Oauth2ImplicitSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2PasswordSecurity) + if ok { + _, err := p.Oauth2PasswordSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2ApplicationSecurity) + if ok { + _, err := p.Oauth2ApplicationSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + { + p, ok := m.Oneof.(*SecurityDefinitionsItem_Oauth2AccessCodeSecurity) + if ok { + _, err := p.Oauth2AccessCodeSecurity.ResolveReferences(root) + if err != nil { + return nil, err + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside SecurityRequirement objects. +func (m *SecurityRequirement) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside StringArray objects. +func (m *StringArray) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Tag objects. 
+func (m *Tag) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + if m.ExternalDocs != nil { + _, err := m.ExternalDocs.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside TypeItem objects. +func (m *TypeItem) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside VendorExtension objects. +func (m *VendorExtension) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.AdditionalProperties { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ResolveReferences resolves references found inside Xml objects. +func (m *Xml) ResolveReferences(root string) (*yaml.Node, error) { + errors := make([]error, 0) + for _, item := range m.VendorExtension { + if item != nil { + _, err := item.ResolveReferences(root) + if err != nil { + errors = append(errors, err) + } + } + } + return nil, compiler.NewErrorGroupOrNil(errors) +} + +// ToRawInfo returns a description of AdditionalPropertiesItem suitable for JSON or YAML export. +func (m *AdditionalPropertiesItem) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // AdditionalPropertiesItem + // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:boolean Type:bool StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + if v1, ok := m.GetOneof().(*AdditionalPropertiesItem_Boolean); ok { + return compiler.NewScalarNodeForBool(v1.Boolean) + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Any suitable for JSON or YAML export. +func (m *Any) ToRawInfo() *yaml.Node { + var err error + var node yaml.Node + err = yaml.Unmarshal([]byte(m.Yaml), &node) + if err == nil { + if node.Kind == yaml.DocumentNode { + return node.Content[0] + } + return &node + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of ApiKeySecurity suitable for JSON or YAML export. +func (m *ApiKeySecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of BasicAuthenticationSecurity suitable for JSON or YAML export. +func (m *BasicAuthenticationSecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of BodyParameter suitable for JSON or YAML export. +func (m *BodyParameter) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) + info.Content = append(info.Content, m.Schema.ToRawInfo()) + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Contact suitable for JSON or YAML export. 
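The VendorExtension handling above repeats in every ToRawInfo method: each NamedAny contributes its Name as a mapping key and the YAML stored in its Any value, re-parsed by Any.ToRawInfo, as the mapping value. A short, illustrative sketch of reading x-... extensions back off a parsed Info; the field names follow the generated structs used above, while the package name and the yaml.v3 import are assumptions, not part of this diff.

package example

import (
	"fmt"

	openapi_v2 "github.com/google/gnostic-models/openapiv2"
	"gopkg.in/yaml.v3"
)

// printExtensions lists the x-... vendor extensions attached to a parsed
// Info object and re-serializes each value from the yaml.Node produced by
// Any.ToRawInfo. Illustrative helper, not part of the vendored package.
func printExtensions(info *openapi_v2.Info) error {
	for _, ext := range info.VendorExtension {
		out, err := yaml.Marshal(ext.Value.ToRawInfo())
		if err != nil {
			return err
		}
		fmt.Printf("%s: %s", ext.Name, out)
	}
	return nil
}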
+func (m *Contact) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Url != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + } + if m.Email != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("email")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Email)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Default suitable for JSON or YAML export. +func (m *Default) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Definitions suitable for JSON or YAML export. +func (m *Definitions) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Document suitable for JSON or YAML export. +func (m *Document) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("swagger")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Swagger)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("info")) + info.Content = append(info.Content, m.Info.ToRawInfo()) + if m.Host != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("host")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Host)) + } + if m.BasePath != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("basePath")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.BasePath)) + } + if len(m.Schemes) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schemes")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Schemes)) + } + if len(m.Consumes) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("consumes")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Consumes)) + } + if len(m.Produces) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("produces")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Produces)) + } + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("paths")) + info.Content = append(info.Content, m.Paths.ToRawInfo()) + if m.Definitions != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("definitions")) + info.Content = append(info.Content, m.Definitions.ToRawInfo()) + } + if m.Parameters != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, m.Parameters.ToRawInfo()) + } + if m.Responses != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) + info.Content = append(info.Content, m.Responses.ToRawInfo()) + } + if len(m.Security) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Security { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) + info.Content = append(info.Content, items) + } + if m.SecurityDefinitions != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("securityDefinitions")) + info.Content = append(info.Content, m.SecurityDefinitions.ToRawInfo()) + } + if len(m.Tags) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Tags { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) + info.Content = append(info.Content, items) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Examples suitable for JSON or YAML export. +func (m *Examples) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ExternalDocs suitable for JSON or YAML export. +func (m *ExternalDocs) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of FileSchema suitable for JSON or YAML export. 
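Document.ToRawInfo reassembles the whole description into a single yaml.Node mapping, always emitting the required swagger, info, and paths keys and skipping optional fields that are still at their zero value. One way to use it is a normalize-and-reprint round trip; the sketch below assumes the package's ParseDocument helper and gopkg.in/yaml.v3, which are conventions of the surrounding code rather than additions in this diff.

package main

import (
	"fmt"
	"os"

	openapi_v2 "github.com/google/gnostic-models/openapiv2"
	"gopkg.in/yaml.v3"
)

func main() {
	b, err := os.ReadFile("swagger.yaml") // hypothetical input path
	if err != nil {
		panic(err)
	}
	doc, err := openapi_v2.ParseDocument(b)
	if err != nil {
		panic(err)
	}
	// ToRawInfo returns a *yaml.Node mapping; marshaling it yields a
	// normalized YAML rendering of the parsed document.
	out, err := yaml.Marshal(doc.ToRawInfo())
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}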
+func (m *FileSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Title != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if len(m.Required) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Required)) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + if m.ReadOnly != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("readOnly")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ReadOnly)) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.Example != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) + info.Content = append(info.Content, m.Example.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of FormDataParameterSubSchema suitable for JSON or YAML export. 
+func (m *FormDataParameterSubSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + if m.In != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.AllowEmptyValue != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = 
append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Header suitable for JSON or YAML export. +func (m *Header) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of HeaderParameterSubSchema suitable for JSON or YAML export. 
+func (m *HeaderParameterSubSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + if m.In != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, 
compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Headers suitable for JSON or YAML export. +func (m *Headers) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Info suitable for JSON or YAML export. +func (m *Info) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("version")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Version)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.TermsOfService != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("termsOfService")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TermsOfService)) + } + if m.Contact != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("contact")) + info.Content = append(info.Content, m.Contact.ToRawInfo()) + } + if m.License != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("license")) + info.Content = append(info.Content, m.License.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ItemsItem suitable for JSON or YAML export. 
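As elsewhere, Info.ToRawInfo always writes its required title and version keys and only adds description, termsOfService, contact, and license when they are set. A tiny sketch of what that means for a hand-built value; the struct literal relies on the generated field names referenced in the method above, and the package and import names are assumptions.

package main

import (
	"fmt"

	openapi_v2 "github.com/google/gnostic-models/openapiv2"
	"gopkg.in/yaml.v3"
)

func main() {
	// Only the required keys plus the non-empty description should appear
	// in the output; zero-valued optional fields such as termsOfService
	// are skipped by ToRawInfo.
	info := &openapi_v2.Info{
		Title:       "example-api",
		Version:     "1.0.0",
		Description: "illustration only",
	}
	out, err := yaml.Marshal(info.ToRawInfo())
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}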
+func (m *ItemsItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if len(m.Schema) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Schema { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) + info.Content = append(info.Content, items) + } + return info +} + +// ToRawInfo returns a description of JsonReference suitable for JSON or YAML export. +func (m *JsonReference) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + return info +} + +// ToRawInfo returns a description of License suitable for JSON or YAML export. +func (m *License) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + if m.Url != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("url")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Url)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of NamedAny suitable for JSON or YAML export. +func (m *NamedAny) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Value != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) + info.Content = append(info.Content, m.Value.ToRawInfo()) + } + return info +} + +// ToRawInfo returns a description of NamedHeader suitable for JSON or YAML export. +func (m *NamedHeader) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:Header StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedParameter suitable for JSON or YAML export. 
+func (m *NamedParameter) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedPathItem suitable for JSON or YAML export. +func (m *NamedPathItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:PathItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedResponse suitable for JSON or YAML export. +func (m *NamedResponse) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedResponseValue suitable for JSON or YAML export. +func (m *NamedResponseValue) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:ResponseValue StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSchema suitable for JSON or YAML export. +func (m *NamedSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedSecurityDefinitionsItem suitable for JSON or YAML export. +func (m *NamedSecurityDefinitionsItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:SecurityDefinitionsItem StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NamedString suitable for JSON or YAML export. 
+func (m *NamedString) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Value != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Value)) + } + return info +} + +// ToRawInfo returns a description of NamedStringArray suitable for JSON or YAML export. +func (m *NamedStringArray) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + // &{Name:value Type:StringArray StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:Mapped value} + return info +} + +// ToRawInfo returns a description of NonBodyParameter suitable for JSON or YAML export. +func (m *NonBodyParameter) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // NonBodyParameter + // {Name:headerParameterSubSchema Type:HeaderParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetHeaderParameterSubSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:formDataParameterSubSchema Type:FormDataParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetFormDataParameterSubSchema() + if v1 != nil { + return v1.ToRawInfo() + } + // {Name:queryParameterSubSchema Type:QueryParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v2 := m.GetQueryParameterSubSchema() + if v2 != nil { + return v2.ToRawInfo() + } + // {Name:pathParameterSubSchema Type:PathParameterSubSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v3 := m.GetPathParameterSubSchema() + if v3 != nil { + return v3.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Oauth2AccessCodeSecurity suitable for JSON or YAML export. +func (m *Oauth2AccessCodeSecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) + if m.Scopes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) + info.Content = append(info.Content, m.Scopes.ToRawInfo()) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.AuthorizationUrl)) + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Oauth2ApplicationSecurity suitable for JSON or YAML export. +func (m *Oauth2ApplicationSecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) + if m.Scopes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) + info.Content = append(info.Content, m.Scopes.ToRawInfo()) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Oauth2ImplicitSecurity suitable for JSON or YAML export. +func (m *Oauth2ImplicitSecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) + if m.Scopes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) + info.Content = append(info.Content, m.Scopes.ToRawInfo()) + } + // always include this required field. 
+ info.Content = append(info.Content, compiler.NewScalarNodeForString("authorizationUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.AuthorizationUrl)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Oauth2PasswordSecurity suitable for JSON or YAML export. +func (m *Oauth2PasswordSecurity) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("flow")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Flow)) + if m.Scopes != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("scopes")) + info.Content = append(info.Content, m.Scopes.ToRawInfo()) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("tokenUrl")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.TokenUrl)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Oauth2Scopes suitable for JSON or YAML export. +func (m *Oauth2Scopes) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // &{Name:additionalProperties Type:NamedString StringEnumValues:[] MapType:string Repeated:true Pattern: Implicit:true Description:} + return info +} + +// ToRawInfo returns a description of Operation suitable for JSON or YAML export. 
+func (m *Operation) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if len(m.Tags) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("tags")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Tags)) + } + if m.Summary != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("summary")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Summary)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.OperationId != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("operationId")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.OperationId)) + } + if len(m.Produces) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("produces")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Produces)) + } + if len(m.Consumes) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("consumes")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Consumes)) + } + if len(m.Parameters) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Parameters { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, items) + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("responses")) + info.Content = append(info.Content, m.Responses.ToRawInfo()) + if len(m.Schemes) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schemes")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Schemes)) + } + if m.Deprecated != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("deprecated")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Deprecated)) + } + if len(m.Security) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Security { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("security")) + info.Content = append(info.Content, items) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Parameter suitable for JSON or YAML export. 
+func (m *Parameter) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // Parameter + // {Name:bodyParameter Type:BodyParameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetBodyParameter() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:nonBodyParameter Type:NonBodyParameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetNonBodyParameter() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of ParameterDefinitions suitable for JSON or YAML export. +func (m *ParameterDefinitions) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ParametersItem suitable for JSON or YAML export. +func (m *ParametersItem) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // ParametersItem + // {Name:parameter Type:Parameter StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetParameter() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:jsonReference Type:JsonReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetJsonReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of PathItem suitable for JSON or YAML export. +func (m *PathItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.XRef != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) + } + if m.Get != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("get")) + info.Content = append(info.Content, m.Get.ToRawInfo()) + } + if m.Put != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("put")) + info.Content = append(info.Content, m.Put.ToRawInfo()) + } + if m.Post != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("post")) + info.Content = append(info.Content, m.Post.ToRawInfo()) + } + if m.Delete != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("delete")) + info.Content = append(info.Content, m.Delete.ToRawInfo()) + } + if m.Options != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("options")) + info.Content = append(info.Content, m.Options.ToRawInfo()) + } + if m.Head != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("head")) + info.Content = append(info.Content, m.Head.ToRawInfo()) + } + if m.Patch != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("patch")) + info.Content = append(info.Content, m.Patch.ToRawInfo()) + } + if len(m.Parameters) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Parameters { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("parameters")) + info.Content = append(info.Content, items) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, 
compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of PathParameterSubSchema suitable for JSON or YAML export. +func (m *PathParameterSubSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + if m.In != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + 
info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Paths suitable for JSON or YAML export. +func (m *Paths) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + if m.Path != nil { + for _, item := range m.Path { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of PrimitivesItems suitable for JSON or YAML export. 
+func (m *PrimitivesItems) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, 
item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Properties suitable for JSON or YAML export. +func (m *Properties) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of QueryParameterSubSchema suitable for JSON or YAML export. +func (m *QueryParameterSubSchema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Required != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Required)) + } + if m.In != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("in")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.In)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.AllowEmptyValue != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("allowEmptyValue")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.AllowEmptyValue)) + } + if m.Type != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Items != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, m.Items.ToRawInfo()) + } + if m.CollectionFormat != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("collectionFormat")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.CollectionFormat)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, 
compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Response suitable for JSON or YAML export. +func (m *Response) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + if m.Schema != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("schema")) + info.Content = append(info.Content, m.Schema.ToRawInfo()) + } + if m.Headers != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("headers")) + info.Content = append(info.Content, m.Headers.ToRawInfo()) + } + if m.Examples != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("examples")) + info.Content = append(info.Content, m.Examples.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ResponseDefinitions suitable for JSON or YAML export. 
+func (m *ResponseDefinitions) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of ResponseValue suitable for JSON or YAML export. +func (m *ResponseValue) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // ResponseValue + // {Name:response Type:Response StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetResponse() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:jsonReference Type:JsonReference StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetJsonReference() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of Responses suitable for JSON or YAML export. +func (m *Responses) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.ResponseCode != nil { + for _, item := range m.ResponseCode { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Schema suitable for JSON or YAML export. +func (m *Schema) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.XRef != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("$ref")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.XRef)) + } + if m.Format != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("format")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Format)) + } + if m.Title != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("title")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Title)) + } + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.Default != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("default")) + info.Content = append(info.Content, m.Default.ToRawInfo()) + } + if m.MultipleOf != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("multipleOf")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.MultipleOf)) + } + if m.Maximum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForFloat(m.Maximum)) + } + if m.ExclusiveMaximum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMaximum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMaximum)) + } + if m.Minimum != 0.0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minimum")) + info.Content = append(info.Content, 
compiler.NewScalarNodeForFloat(m.Minimum)) + } + if m.ExclusiveMinimum != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("exclusiveMinimum")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ExclusiveMinimum)) + } + if m.MaxLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxLength)) + } + if m.MinLength != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minLength")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinLength)) + } + if m.Pattern != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("pattern")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Pattern)) + } + if m.MaxItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxItems)) + } + if m.MinItems != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinItems)) + } + if m.UniqueItems != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("uniqueItems")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.UniqueItems)) + } + if m.MaxProperties != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("maxProperties")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MaxProperties)) + } + if m.MinProperties != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("minProperties")) + info.Content = append(info.Content, compiler.NewScalarNodeForInt(m.MinProperties)) + } + if len(m.Required) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("required")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Required)) + } + if len(m.Enum) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.Enum { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("enum")) + info.Content = append(info.Content, items) + } + if m.AdditionalProperties != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("additionalProperties")) + info.Content = append(info.Content, m.AdditionalProperties.ToRawInfo()) + } + if m.Type != nil { + if len(m.Type.Value) == 1 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Type.Value[0])) + } else { + info.Content = append(info.Content, compiler.NewScalarNodeForString("type")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Type.Value)) + } + } + if m.Items != nil { + items := compiler.NewSequenceNode() + for _, item := range m.Items.Schema { + items.Content = append(items.Content, item.ToRawInfo()) + } + if len(items.Content) == 1 { + items = items.Content[0] + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("items")) + info.Content = append(info.Content, items) + } + if len(m.AllOf) != 0 { + items := compiler.NewSequenceNode() + for _, item := range m.AllOf { + items.Content = append(items.Content, item.ToRawInfo()) + } + info.Content = append(info.Content, compiler.NewScalarNodeForString("allOf")) + info.Content = 
append(info.Content, items) + } + if m.Properties != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("properties")) + info.Content = append(info.Content, m.Properties.ToRawInfo()) + } + if m.Discriminator != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("discriminator")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Discriminator)) + } + if m.ReadOnly != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("readOnly")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.ReadOnly)) + } + if m.Xml != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("xml")) + info.Content = append(info.Content, m.Xml.ToRawInfo()) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.Example != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("example")) + info.Content = append(info.Content, m.Example.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of SchemaItem suitable for JSON or YAML export. +func (m *SchemaItem) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // SchemaItem + // {Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetSchema() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:fileSchema Type:FileSchema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetFileSchema() + if v1 != nil { + return v1.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of SecurityDefinitions suitable for JSON or YAML export. +func (m *SecurityDefinitions) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of SecurityDefinitionsItem suitable for JSON or YAML export. 
+func (m *SecurityDefinitionsItem) ToRawInfo() *yaml.Node { + // ONE OF WRAPPER + // SecurityDefinitionsItem + // {Name:basicAuthenticationSecurity Type:BasicAuthenticationSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v0 := m.GetBasicAuthenticationSecurity() + if v0 != nil { + return v0.ToRawInfo() + } + // {Name:apiKeySecurity Type:ApiKeySecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v1 := m.GetApiKeySecurity() + if v1 != nil { + return v1.ToRawInfo() + } + // {Name:oauth2ImplicitSecurity Type:Oauth2ImplicitSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v2 := m.GetOauth2ImplicitSecurity() + if v2 != nil { + return v2.ToRawInfo() + } + // {Name:oauth2PasswordSecurity Type:Oauth2PasswordSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v3 := m.GetOauth2PasswordSecurity() + if v3 != nil { + return v3.ToRawInfo() + } + // {Name:oauth2ApplicationSecurity Type:Oauth2ApplicationSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v4 := m.GetOauth2ApplicationSecurity() + if v4 != nil { + return v4.ToRawInfo() + } + // {Name:oauth2AccessCodeSecurity Type:Oauth2AccessCodeSecurity StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:} + v5 := m.GetOauth2AccessCodeSecurity() + if v5 != nil { + return v5.ToRawInfo() + } + return compiler.NewNullNode() +} + +// ToRawInfo returns a description of SecurityRequirement suitable for JSON or YAML export. +func (m *SecurityRequirement) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of StringArray suitable for JSON or YAML export. +func (m *StringArray) ToRawInfo() *yaml.Node { + return compiler.NewSequenceNodeForStringArray(m.Value) +} + +// ToRawInfo returns a description of Tag suitable for JSON or YAML export. +func (m *Tag) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + // always include this required field. + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + if m.Description != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("description")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Description)) + } + if m.ExternalDocs != nil { + info.Content = append(info.Content, compiler.NewScalarNodeForString("externalDocs")) + info.Content = append(info.Content, m.ExternalDocs.ToRawInfo()) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of TypeItem suitable for JSON or YAML export. 
+func (m *TypeItem) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if len(m.Value) != 0 { + info.Content = append(info.Content, compiler.NewScalarNodeForString("value")) + info.Content = append(info.Content, compiler.NewSequenceNodeForStringArray(m.Value)) + } + return info +} + +// ToRawInfo returns a description of VendorExtension suitable for JSON or YAML export. +func (m *VendorExtension) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.AdditionalProperties != nil { + for _, item := range m.AdditionalProperties { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +// ToRawInfo returns a description of Xml suitable for JSON or YAML export. +func (m *Xml) ToRawInfo() *yaml.Node { + info := compiler.NewMappingNode() + if m == nil { + return info + } + if m.Name != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("name")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Name)) + } + if m.Namespace != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("namespace")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Namespace)) + } + if m.Prefix != "" { + info.Content = append(info.Content, compiler.NewScalarNodeForString("prefix")) + info.Content = append(info.Content, compiler.NewScalarNodeForString(m.Prefix)) + } + if m.Attribute != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("attribute")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Attribute)) + } + if m.Wrapped != false { + info.Content = append(info.Content, compiler.NewScalarNodeForString("wrapped")) + info.Content = append(info.Content, compiler.NewScalarNodeForBool(m.Wrapped)) + } + if m.VendorExtension != nil { + for _, item := range m.VendorExtension { + info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) + info.Content = append(info.Content, item.Value.ToRawInfo()) + } + } + return info +} + +var ( + pattern0 = regexp.MustCompile("^x-") + pattern1 = regexp.MustCompile("^/") + pattern2 = regexp.MustCompile("^([0-9]{3})$|^(default)$") +) diff --git a/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.pb.go b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.pb.go new file mode 100644 index 00000000..65c4c913 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.pb.go @@ -0,0 +1,7342 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.27.1 +// protoc v3.19.3 +// source: openapiv2/OpenAPIv2.proto + +package openapi_v2 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + anypb "google.golang.org/protobuf/types/known/anypb" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type AdditionalPropertiesItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Oneof: + // *AdditionalPropertiesItem_Schema + // *AdditionalPropertiesItem_Boolean + Oneof isAdditionalPropertiesItem_Oneof `protobuf_oneof:"oneof"` +} + +func (x *AdditionalPropertiesItem) Reset() { + *x = AdditionalPropertiesItem{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AdditionalPropertiesItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AdditionalPropertiesItem) ProtoMessage() {} + +func (x *AdditionalPropertiesItem) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AdditionalPropertiesItem.ProtoReflect.Descriptor instead. +func (*AdditionalPropertiesItem) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{0} +} + +func (m *AdditionalPropertiesItem) GetOneof() isAdditionalPropertiesItem_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (x *AdditionalPropertiesItem) GetSchema() *Schema { + if x, ok := x.GetOneof().(*AdditionalPropertiesItem_Schema); ok { + return x.Schema + } + return nil +} + +func (x *AdditionalPropertiesItem) GetBoolean() bool { + if x, ok := x.GetOneof().(*AdditionalPropertiesItem_Boolean); ok { + return x.Boolean + } + return false +} + +type isAdditionalPropertiesItem_Oneof interface { + isAdditionalPropertiesItem_Oneof() +} + +type AdditionalPropertiesItem_Schema struct { + Schema *Schema `protobuf:"bytes,1,opt,name=schema,proto3,oneof"` +} + +type AdditionalPropertiesItem_Boolean struct { + Boolean bool `protobuf:"varint,2,opt,name=boolean,proto3,oneof"` +} + +func (*AdditionalPropertiesItem_Schema) isAdditionalPropertiesItem_Oneof() {} + +func (*AdditionalPropertiesItem_Boolean) isAdditionalPropertiesItem_Oneof() {} + +type Any struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Value *anypb.Any `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + Yaml string `protobuf:"bytes,2,opt,name=yaml,proto3" json:"yaml,omitempty"` +} + +func (x *Any) Reset() { + *x = Any{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Any) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Any) ProtoMessage() {} + +func (x *Any) ProtoReflect() protoreflect.Message { + mi := 
&file_openapiv2_OpenAPIv2_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Any.ProtoReflect.Descriptor instead. +func (*Any) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{1} +} + +func (x *Any) GetValue() *anypb.Any { + if x != nil { + return x.Value + } + return nil +} + +func (x *Any) GetYaml() string { + if x != nil { + return x.Yaml + } + return "" +} + +type ApiKeySecurity struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + In string `protobuf:"bytes,3,opt,name=in,proto3" json:"in,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,5,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *ApiKeySecurity) Reset() { + *x = ApiKeySecurity{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ApiKeySecurity) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ApiKeySecurity) ProtoMessage() {} + +func (x *ApiKeySecurity) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ApiKeySecurity.ProtoReflect.Descriptor instead. 
+func (*ApiKeySecurity) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{2} +} + +func (x *ApiKeySecurity) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *ApiKeySecurity) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ApiKeySecurity) GetIn() string { + if x != nil { + return x.In + } + return "" +} + +func (x *ApiKeySecurity) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *ApiKeySecurity) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type BasicAuthenticationSecurity struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,3,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *BasicAuthenticationSecurity) Reset() { + *x = BasicAuthenticationSecurity{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BasicAuthenticationSecurity) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BasicAuthenticationSecurity) ProtoMessage() {} + +func (x *BasicAuthenticationSecurity) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BasicAuthenticationSecurity.ProtoReflect.Descriptor instead. +func (*BasicAuthenticationSecurity) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{3} +} + +func (x *BasicAuthenticationSecurity) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *BasicAuthenticationSecurity) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *BasicAuthenticationSecurity) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type BodyParameter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + // The name of the parameter. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,3,opt,name=in,proto3" json:"in,omitempty"` + // Determines whether or not this parameter is required or optional. 
+ Required bool `protobuf:"varint,4,opt,name=required,proto3" json:"required,omitempty"` + Schema *Schema `protobuf:"bytes,5,opt,name=schema,proto3" json:"schema,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *BodyParameter) Reset() { + *x = BodyParameter{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BodyParameter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BodyParameter) ProtoMessage() {} + +func (x *BodyParameter) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BodyParameter.ProtoReflect.Descriptor instead. +func (*BodyParameter) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{4} +} + +func (x *BodyParameter) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *BodyParameter) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *BodyParameter) GetIn() string { + if x != nil { + return x.In + } + return "" +} + +func (x *BodyParameter) GetRequired() bool { + if x != nil { + return x.Required + } + return false +} + +func (x *BodyParameter) GetSchema() *Schema { + if x != nil { + return x.Schema + } + return nil +} + +func (x *BodyParameter) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +// Contact information for the owners of the API. +type Contact struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The identifying name of the contact person/organization. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The URL pointing to the contact information. + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + // The email address of the contact person/organization. + Email string `protobuf:"bytes,3,opt,name=email,proto3" json:"email,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,4,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Contact) Reset() { + *x = Contact{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Contact) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Contact) ProtoMessage() {} + +func (x *Contact) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Contact.ProtoReflect.Descriptor instead. 
+func (*Contact) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{5} +} + +func (x *Contact) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Contact) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *Contact) GetEmail() string { + if x != nil { + return x.Email + } + return "" +} + +func (x *Contact) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type Default struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedAny `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *Default) Reset() { + *x = Default{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Default) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Default) ProtoMessage() {} + +func (x *Default) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Default.ProtoReflect.Descriptor instead. +func (*Default) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{6} +} + +func (x *Default) GetAdditionalProperties() []*NamedAny { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +// One or more JSON objects describing the schemas being consumed and produced by the API. +type Definitions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedSchema `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *Definitions) Reset() { + *x = Definitions{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Definitions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Definitions) ProtoMessage() {} + +func (x *Definitions) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Definitions.ProtoReflect.Descriptor instead. +func (*Definitions) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{7} +} + +func (x *Definitions) GetAdditionalProperties() []*NamedSchema { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +type Document struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The Swagger version of this document. + Swagger string `protobuf:"bytes,1,opt,name=swagger,proto3" json:"swagger,omitempty"` + Info *Info `protobuf:"bytes,2,opt,name=info,proto3" json:"info,omitempty"` + // The host (name or ip) of the API. 
Example: 'swagger.io' + Host string `protobuf:"bytes,3,opt,name=host,proto3" json:"host,omitempty"` + // The base path to the API. Example: '/api'. + BasePath string `protobuf:"bytes,4,opt,name=base_path,json=basePath,proto3" json:"base_path,omitempty"` + // The transfer protocol of the API. + Schemes []string `protobuf:"bytes,5,rep,name=schemes,proto3" json:"schemes,omitempty"` + // A list of MIME types accepted by the API. + Consumes []string `protobuf:"bytes,6,rep,name=consumes,proto3" json:"consumes,omitempty"` + // A list of MIME types the API can produce. + Produces []string `protobuf:"bytes,7,rep,name=produces,proto3" json:"produces,omitempty"` + Paths *Paths `protobuf:"bytes,8,opt,name=paths,proto3" json:"paths,omitempty"` + Definitions *Definitions `protobuf:"bytes,9,opt,name=definitions,proto3" json:"definitions,omitempty"` + Parameters *ParameterDefinitions `protobuf:"bytes,10,opt,name=parameters,proto3" json:"parameters,omitempty"` + Responses *ResponseDefinitions `protobuf:"bytes,11,opt,name=responses,proto3" json:"responses,omitempty"` + Security []*SecurityRequirement `protobuf:"bytes,12,rep,name=security,proto3" json:"security,omitempty"` + SecurityDefinitions *SecurityDefinitions `protobuf:"bytes,13,opt,name=security_definitions,json=securityDefinitions,proto3" json:"security_definitions,omitempty"` + Tags []*Tag `protobuf:"bytes,14,rep,name=tags,proto3" json:"tags,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,15,opt,name=external_docs,json=externalDocs,proto3" json:"external_docs,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,16,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Document) Reset() { + *x = Document{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Document) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Document) ProtoMessage() {} + +func (x *Document) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Document.ProtoReflect.Descriptor instead. 
+func (*Document) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{8} +} + +func (x *Document) GetSwagger() string { + if x != nil { + return x.Swagger + } + return "" +} + +func (x *Document) GetInfo() *Info { + if x != nil { + return x.Info + } + return nil +} + +func (x *Document) GetHost() string { + if x != nil { + return x.Host + } + return "" +} + +func (x *Document) GetBasePath() string { + if x != nil { + return x.BasePath + } + return "" +} + +func (x *Document) GetSchemes() []string { + if x != nil { + return x.Schemes + } + return nil +} + +func (x *Document) GetConsumes() []string { + if x != nil { + return x.Consumes + } + return nil +} + +func (x *Document) GetProduces() []string { + if x != nil { + return x.Produces + } + return nil +} + +func (x *Document) GetPaths() *Paths { + if x != nil { + return x.Paths + } + return nil +} + +func (x *Document) GetDefinitions() *Definitions { + if x != nil { + return x.Definitions + } + return nil +} + +func (x *Document) GetParameters() *ParameterDefinitions { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *Document) GetResponses() *ResponseDefinitions { + if x != nil { + return x.Responses + } + return nil +} + +func (x *Document) GetSecurity() []*SecurityRequirement { + if x != nil { + return x.Security + } + return nil +} + +func (x *Document) GetSecurityDefinitions() *SecurityDefinitions { + if x != nil { + return x.SecurityDefinitions + } + return nil +} + +func (x *Document) GetTags() []*Tag { + if x != nil { + return x.Tags + } + return nil +} + +func (x *Document) GetExternalDocs() *ExternalDocs { + if x != nil { + return x.ExternalDocs + } + return nil +} + +func (x *Document) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type Examples struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedAny `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *Examples) Reset() { + *x = Examples{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Examples) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Examples) ProtoMessage() {} + +func (x *Examples) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Examples.ProtoReflect.Descriptor instead. 
+func (*Examples) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{9} +} + +func (x *Examples) GetAdditionalProperties() []*NamedAny { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +// information about external documentation +type ExternalDocs struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,3,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *ExternalDocs) Reset() { + *x = ExternalDocs{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExternalDocs) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExternalDocs) ProtoMessage() {} + +func (x *ExternalDocs) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExternalDocs.ProtoReflect.Descriptor instead. +func (*ExternalDocs) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{10} +} + +func (x *ExternalDocs) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *ExternalDocs) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *ExternalDocs) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +// A deterministic version of a JSON Schema object. 
+type FileSchema struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Format string `protobuf:"bytes,1,opt,name=format,proto3" json:"format,omitempty"` + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + Default *Any `protobuf:"bytes,4,opt,name=default,proto3" json:"default,omitempty"` + Required []string `protobuf:"bytes,5,rep,name=required,proto3" json:"required,omitempty"` + Type string `protobuf:"bytes,6,opt,name=type,proto3" json:"type,omitempty"` + ReadOnly bool `protobuf:"varint,7,opt,name=read_only,json=readOnly,proto3" json:"read_only,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,8,opt,name=external_docs,json=externalDocs,proto3" json:"external_docs,omitempty"` + Example *Any `protobuf:"bytes,9,opt,name=example,proto3" json:"example,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,10,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *FileSchema) Reset() { + *x = FileSchema{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileSchema) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileSchema) ProtoMessage() {} + +func (x *FileSchema) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileSchema.ProtoReflect.Descriptor instead. +func (*FileSchema) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{11} +} + +func (x *FileSchema) GetFormat() string { + if x != nil { + return x.Format + } + return "" +} + +func (x *FileSchema) GetTitle() string { + if x != nil { + return x.Title + } + return "" +} + +func (x *FileSchema) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *FileSchema) GetDefault() *Any { + if x != nil { + return x.Default + } + return nil +} + +func (x *FileSchema) GetRequired() []string { + if x != nil { + return x.Required + } + return nil +} + +func (x *FileSchema) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *FileSchema) GetReadOnly() bool { + if x != nil { + return x.ReadOnly + } + return false +} + +func (x *FileSchema) GetExternalDocs() *ExternalDocs { + if x != nil { + return x.ExternalDocs + } + return nil +} + +func (x *FileSchema) GetExample() *Any { + if x != nil { + return x.Example + } + return nil +} + +func (x *FileSchema) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type FormDataParameterSubSchema struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Determines whether or not this parameter is required or optional. + Required bool `protobuf:"varint,1,opt,name=required,proto3" json:"required,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,2,opt,name=in,proto3" json:"in,omitempty"` + // A brief description of the parameter. This could contain examples of use. 
GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The name of the parameter. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // allows sending a parameter by name only or with an empty value. + AllowEmptyValue bool `protobuf:"varint,5,opt,name=allow_empty_value,json=allowEmptyValue,proto3" json:"allow_empty_value,omitempty"` + Type string `protobuf:"bytes,6,opt,name=type,proto3" json:"type,omitempty"` + Format string `protobuf:"bytes,7,opt,name=format,proto3" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,8,opt,name=items,proto3" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,9,opt,name=collection_format,json=collectionFormat,proto3" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,10,opt,name=default,proto3" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,11,opt,name=maximum,proto3" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,12,opt,name=exclusive_maximum,json=exclusiveMaximum,proto3" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,13,opt,name=minimum,proto3" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,14,opt,name=exclusive_minimum,json=exclusiveMinimum,proto3" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,15,opt,name=max_length,json=maxLength,proto3" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,16,opt,name=min_length,json=minLength,proto3" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,17,opt,name=pattern,proto3" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,18,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,19,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,20,opt,name=unique_items,json=uniqueItems,proto3" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,21,rep,name=enum,proto3" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,22,opt,name=multiple_of,json=multipleOf,proto3" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,23,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *FormDataParameterSubSchema) Reset() { + *x = FormDataParameterSubSchema{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FormDataParameterSubSchema) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FormDataParameterSubSchema) ProtoMessage() {} + +func (x *FormDataParameterSubSchema) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FormDataParameterSubSchema.ProtoReflect.Descriptor instead. 
+func (*FormDataParameterSubSchema) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{12} +} + +func (x *FormDataParameterSubSchema) GetRequired() bool { + if x != nil { + return x.Required + } + return false +} + +func (x *FormDataParameterSubSchema) GetIn() string { + if x != nil { + return x.In + } + return "" +} + +func (x *FormDataParameterSubSchema) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *FormDataParameterSubSchema) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *FormDataParameterSubSchema) GetAllowEmptyValue() bool { + if x != nil { + return x.AllowEmptyValue + } + return false +} + +func (x *FormDataParameterSubSchema) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *FormDataParameterSubSchema) GetFormat() string { + if x != nil { + return x.Format + } + return "" +} + +func (x *FormDataParameterSubSchema) GetItems() *PrimitivesItems { + if x != nil { + return x.Items + } + return nil +} + +func (x *FormDataParameterSubSchema) GetCollectionFormat() string { + if x != nil { + return x.CollectionFormat + } + return "" +} + +func (x *FormDataParameterSubSchema) GetDefault() *Any { + if x != nil { + return x.Default + } + return nil +} + +func (x *FormDataParameterSubSchema) GetMaximum() float64 { + if x != nil { + return x.Maximum + } + return 0 +} + +func (x *FormDataParameterSubSchema) GetExclusiveMaximum() bool { + if x != nil { + return x.ExclusiveMaximum + } + return false +} + +func (x *FormDataParameterSubSchema) GetMinimum() float64 { + if x != nil { + return x.Minimum + } + return 0 +} + +func (x *FormDataParameterSubSchema) GetExclusiveMinimum() bool { + if x != nil { + return x.ExclusiveMinimum + } + return false +} + +func (x *FormDataParameterSubSchema) GetMaxLength() int64 { + if x != nil { + return x.MaxLength + } + return 0 +} + +func (x *FormDataParameterSubSchema) GetMinLength() int64 { + if x != nil { + return x.MinLength + } + return 0 +} + +func (x *FormDataParameterSubSchema) GetPattern() string { + if x != nil { + return x.Pattern + } + return "" +} + +func (x *FormDataParameterSubSchema) GetMaxItems() int64 { + if x != nil { + return x.MaxItems + } + return 0 +} + +func (x *FormDataParameterSubSchema) GetMinItems() int64 { + if x != nil { + return x.MinItems + } + return 0 +} + +func (x *FormDataParameterSubSchema) GetUniqueItems() bool { + if x != nil { + return x.UniqueItems + } + return false +} + +func (x *FormDataParameterSubSchema) GetEnum() []*Any { + if x != nil { + return x.Enum + } + return nil +} + +func (x *FormDataParameterSubSchema) GetMultipleOf() float64 { + if x != nil { + return x.MultipleOf + } + return 0 +} + +func (x *FormDataParameterSubSchema) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type Header struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + Format string `protobuf:"bytes,2,opt,name=format,proto3" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,3,opt,name=items,proto3" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,4,opt,name=collection_format,json=collectionFormat,proto3" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,5,opt,name=default,proto3" json:"default,omitempty"` + Maximum float64 
`protobuf:"fixed64,6,opt,name=maximum,proto3" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,7,opt,name=exclusive_maximum,json=exclusiveMaximum,proto3" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,8,opt,name=minimum,proto3" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,9,opt,name=exclusive_minimum,json=exclusiveMinimum,proto3" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,10,opt,name=max_length,json=maxLength,proto3" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,11,opt,name=min_length,json=minLength,proto3" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,12,opt,name=pattern,proto3" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,13,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,14,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,15,opt,name=unique_items,json=uniqueItems,proto3" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,16,rep,name=enum,proto3" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,17,opt,name=multiple_of,json=multipleOf,proto3" json:"multiple_of,omitempty"` + Description string `protobuf:"bytes,18,opt,name=description,proto3" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,19,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Header) Reset() { + *x = Header{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Header) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Header) ProtoMessage() {} + +func (x *Header) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Header.ProtoReflect.Descriptor instead. 
+func (*Header) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{13} +} + +func (x *Header) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *Header) GetFormat() string { + if x != nil { + return x.Format + } + return "" +} + +func (x *Header) GetItems() *PrimitivesItems { + if x != nil { + return x.Items + } + return nil +} + +func (x *Header) GetCollectionFormat() string { + if x != nil { + return x.CollectionFormat + } + return "" +} + +func (x *Header) GetDefault() *Any { + if x != nil { + return x.Default + } + return nil +} + +func (x *Header) GetMaximum() float64 { + if x != nil { + return x.Maximum + } + return 0 +} + +func (x *Header) GetExclusiveMaximum() bool { + if x != nil { + return x.ExclusiveMaximum + } + return false +} + +func (x *Header) GetMinimum() float64 { + if x != nil { + return x.Minimum + } + return 0 +} + +func (x *Header) GetExclusiveMinimum() bool { + if x != nil { + return x.ExclusiveMinimum + } + return false +} + +func (x *Header) GetMaxLength() int64 { + if x != nil { + return x.MaxLength + } + return 0 +} + +func (x *Header) GetMinLength() int64 { + if x != nil { + return x.MinLength + } + return 0 +} + +func (x *Header) GetPattern() string { + if x != nil { + return x.Pattern + } + return "" +} + +func (x *Header) GetMaxItems() int64 { + if x != nil { + return x.MaxItems + } + return 0 +} + +func (x *Header) GetMinItems() int64 { + if x != nil { + return x.MinItems + } + return 0 +} + +func (x *Header) GetUniqueItems() bool { + if x != nil { + return x.UniqueItems + } + return false +} + +func (x *Header) GetEnum() []*Any { + if x != nil { + return x.Enum + } + return nil +} + +func (x *Header) GetMultipleOf() float64 { + if x != nil { + return x.MultipleOf + } + return 0 +} + +func (x *Header) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Header) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type HeaderParameterSubSchema struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Determines whether or not this parameter is required or optional. + Required bool `protobuf:"varint,1,opt,name=required,proto3" json:"required,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,2,opt,name=in,proto3" json:"in,omitempty"` + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The name of the parameter. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + Type string `protobuf:"bytes,5,opt,name=type,proto3" json:"type,omitempty"` + Format string `protobuf:"bytes,6,opt,name=format,proto3" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,7,opt,name=items,proto3" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,8,opt,name=collection_format,json=collectionFormat,proto3" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,9,opt,name=default,proto3" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,10,opt,name=maximum,proto3" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,11,opt,name=exclusive_maximum,json=exclusiveMaximum,proto3" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,12,opt,name=minimum,proto3" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,13,opt,name=exclusive_minimum,json=exclusiveMinimum,proto3" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,14,opt,name=max_length,json=maxLength,proto3" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,15,opt,name=min_length,json=minLength,proto3" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,16,opt,name=pattern,proto3" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,17,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,18,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,19,opt,name=unique_items,json=uniqueItems,proto3" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,20,rep,name=enum,proto3" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,21,opt,name=multiple_of,json=multipleOf,proto3" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,22,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *HeaderParameterSubSchema) Reset() { + *x = HeaderParameterSubSchema{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *HeaderParameterSubSchema) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*HeaderParameterSubSchema) ProtoMessage() {} + +func (x *HeaderParameterSubSchema) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use HeaderParameterSubSchema.ProtoReflect.Descriptor instead. 
+func (*HeaderParameterSubSchema) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{14} +} + +func (x *HeaderParameterSubSchema) GetRequired() bool { + if x != nil { + return x.Required + } + return false +} + +func (x *HeaderParameterSubSchema) GetIn() string { + if x != nil { + return x.In + } + return "" +} + +func (x *HeaderParameterSubSchema) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *HeaderParameterSubSchema) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *HeaderParameterSubSchema) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *HeaderParameterSubSchema) GetFormat() string { + if x != nil { + return x.Format + } + return "" +} + +func (x *HeaderParameterSubSchema) GetItems() *PrimitivesItems { + if x != nil { + return x.Items + } + return nil +} + +func (x *HeaderParameterSubSchema) GetCollectionFormat() string { + if x != nil { + return x.CollectionFormat + } + return "" +} + +func (x *HeaderParameterSubSchema) GetDefault() *Any { + if x != nil { + return x.Default + } + return nil +} + +func (x *HeaderParameterSubSchema) GetMaximum() float64 { + if x != nil { + return x.Maximum + } + return 0 +} + +func (x *HeaderParameterSubSchema) GetExclusiveMaximum() bool { + if x != nil { + return x.ExclusiveMaximum + } + return false +} + +func (x *HeaderParameterSubSchema) GetMinimum() float64 { + if x != nil { + return x.Minimum + } + return 0 +} + +func (x *HeaderParameterSubSchema) GetExclusiveMinimum() bool { + if x != nil { + return x.ExclusiveMinimum + } + return false +} + +func (x *HeaderParameterSubSchema) GetMaxLength() int64 { + if x != nil { + return x.MaxLength + } + return 0 +} + +func (x *HeaderParameterSubSchema) GetMinLength() int64 { + if x != nil { + return x.MinLength + } + return 0 +} + +func (x *HeaderParameterSubSchema) GetPattern() string { + if x != nil { + return x.Pattern + } + return "" +} + +func (x *HeaderParameterSubSchema) GetMaxItems() int64 { + if x != nil { + return x.MaxItems + } + return 0 +} + +func (x *HeaderParameterSubSchema) GetMinItems() int64 { + if x != nil { + return x.MinItems + } + return 0 +} + +func (x *HeaderParameterSubSchema) GetUniqueItems() bool { + if x != nil { + return x.UniqueItems + } + return false +} + +func (x *HeaderParameterSubSchema) GetEnum() []*Any { + if x != nil { + return x.Enum + } + return nil +} + +func (x *HeaderParameterSubSchema) GetMultipleOf() float64 { + if x != nil { + return x.MultipleOf + } + return 0 +} + +func (x *HeaderParameterSubSchema) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type Headers struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedHeader `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *Headers) Reset() { + *x = Headers{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Headers) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Headers) ProtoMessage() {} + +func (x *Headers) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Headers.ProtoReflect.Descriptor instead. +func (*Headers) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{15} +} + +func (x *Headers) GetAdditionalProperties() []*NamedHeader { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +// General information about the API. +type Info struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A unique and precise title of the API. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // A semantic version number of the API. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The terms of service for the API. + TermsOfService string `protobuf:"bytes,4,opt,name=terms_of_service,json=termsOfService,proto3" json:"terms_of_service,omitempty"` + Contact *Contact `protobuf:"bytes,5,opt,name=contact,proto3" json:"contact,omitempty"` + License *License `protobuf:"bytes,6,opt,name=license,proto3" json:"license,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,7,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Info) Reset() { + *x = Info{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Info) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Info) ProtoMessage() {} + +func (x *Info) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Info.ProtoReflect.Descriptor instead. 
+func (*Info) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{16} +} + +func (x *Info) GetTitle() string { + if x != nil { + return x.Title + } + return "" +} + +func (x *Info) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *Info) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Info) GetTermsOfService() string { + if x != nil { + return x.TermsOfService + } + return "" +} + +func (x *Info) GetContact() *Contact { + if x != nil { + return x.Contact + } + return nil +} + +func (x *Info) GetLicense() *License { + if x != nil { + return x.License + } + return nil +} + +func (x *Info) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type ItemsItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Schema []*Schema `protobuf:"bytes,1,rep,name=schema,proto3" json:"schema,omitempty"` +} + +func (x *ItemsItem) Reset() { + *x = ItemsItem{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ItemsItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ItemsItem) ProtoMessage() {} + +func (x *ItemsItem) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ItemsItem.ProtoReflect.Descriptor instead. +func (*ItemsItem) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{17} +} + +func (x *ItemsItem) GetSchema() []*Schema { + if x != nil { + return x.Schema + } + return nil +} + +type JsonReference struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref,proto3" json:"_ref,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` +} + +func (x *JsonReference) Reset() { + *x = JsonReference{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JsonReference) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JsonReference) ProtoMessage() {} + +func (x *JsonReference) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JsonReference.ProtoReflect.Descriptor instead. 
+func (*JsonReference) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{18} +} + +func (x *JsonReference) GetXRef() string { + if x != nil { + return x.XRef + } + return "" +} + +func (x *JsonReference) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +type License struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The name of the license type. It's encouraged to use an OSI compatible license. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The URL pointing to the license. + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,3,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *License) Reset() { + *x = License{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *License) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*License) ProtoMessage() {} + +func (x *License) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use License.ProtoReflect.Descriptor instead. +func (*License) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{19} +} + +func (x *License) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *License) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *License) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +type NamedAny struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value *Any `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedAny) Reset() { + *x = NamedAny{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedAny) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedAny) ProtoMessage() {} + +func (x *NamedAny) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedAny.ProtoReflect.Descriptor instead. 
+func (*NamedAny) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{20} +} + +func (x *NamedAny) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedAny) GetValue() *Any { + if x != nil { + return x.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Header as ordered (name,value) pairs. +type NamedHeader struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value *Header `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedHeader) Reset() { + *x = NamedHeader{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedHeader) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedHeader) ProtoMessage() {} + +func (x *NamedHeader) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedHeader.ProtoReflect.Descriptor instead. +func (*NamedHeader) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{21} +} + +func (x *NamedHeader) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedHeader) GetValue() *Header { + if x != nil { + return x.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +type NamedParameter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value *Parameter `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedParameter) Reset() { + *x = NamedParameter{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedParameter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedParameter) ProtoMessage() {} + +func (x *NamedParameter) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedParameter.ProtoReflect.Descriptor instead. +func (*NamedParameter) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{22} +} + +func (x *NamedParameter) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedParameter) GetValue() *Parameter { + if x != nil { + return x.Value + } + return nil +} + +// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. 
+type NamedPathItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value *PathItem `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedPathItem) Reset() { + *x = NamedPathItem{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedPathItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedPathItem) ProtoMessage() {} + +func (x *NamedPathItem) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedPathItem.ProtoReflect.Descriptor instead. +func (*NamedPathItem) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{23} +} + +func (x *NamedPathItem) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedPathItem) GetValue() *PathItem { + if x != nil { + return x.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Response as ordered (name,value) pairs. +type NamedResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value *Response `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedResponse) Reset() { + *x = NamedResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedResponse) ProtoMessage() {} + +func (x *NamedResponse) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedResponse.ProtoReflect.Descriptor instead. +func (*NamedResponse) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{24} +} + +func (x *NamedResponse) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedResponse) GetValue() *Response { + if x != nil { + return x.Value + } + return nil +} + +// Automatically-generated message used to represent maps of ResponseValue as ordered (name,value) pairs. 
+type NamedResponseValue struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value *ResponseValue `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedResponseValue) Reset() { + *x = NamedResponseValue{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedResponseValue) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedResponseValue) ProtoMessage() {} + +func (x *NamedResponseValue) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedResponseValue.ProtoReflect.Descriptor instead. +func (*NamedResponseValue) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{25} +} + +func (x *NamedResponseValue) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedResponseValue) GetValue() *ResponseValue { + if x != nil { + return x.Value + } + return nil +} + +// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +type NamedSchema struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value *Schema `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedSchema) Reset() { + *x = NamedSchema{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedSchema) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedSchema) ProtoMessage() {} + +func (x *NamedSchema) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedSchema.ProtoReflect.Descriptor instead. +func (*NamedSchema) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{26} +} + +func (x *NamedSchema) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedSchema) GetValue() *Schema { + if x != nil { + return x.Value + } + return nil +} + +// Automatically-generated message used to represent maps of SecurityDefinitionsItem as ordered (name,value) pairs. 
+type NamedSecurityDefinitionsItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value *SecurityDefinitionsItem `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedSecurityDefinitionsItem) Reset() { + *x = NamedSecurityDefinitionsItem{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedSecurityDefinitionsItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedSecurityDefinitionsItem) ProtoMessage() {} + +func (x *NamedSecurityDefinitionsItem) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[27] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedSecurityDefinitionsItem.ProtoReflect.Descriptor instead. +func (*NamedSecurityDefinitionsItem) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{27} +} + +func (x *NamedSecurityDefinitionsItem) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedSecurityDefinitionsItem) GetValue() *SecurityDefinitionsItem { + if x != nil { + return x.Value + } + return nil +} + +// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +type NamedString struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedString) Reset() { + *x = NamedString{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedString) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedString) ProtoMessage() {} + +func (x *NamedString) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[28] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedString.ProtoReflect.Descriptor instead. +func (*NamedString) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{28} +} + +func (x *NamedString) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedString) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs. 
+type NamedStringArray struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Map key + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mapped value + Value *StringArray `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *NamedStringArray) Reset() { + *x = NamedStringArray{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[29] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NamedStringArray) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NamedStringArray) ProtoMessage() {} + +func (x *NamedStringArray) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[29] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NamedStringArray.ProtoReflect.Descriptor instead. +func (*NamedStringArray) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{29} +} + +func (x *NamedStringArray) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *NamedStringArray) GetValue() *StringArray { + if x != nil { + return x.Value + } + return nil +} + +type NonBodyParameter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Oneof: + // *NonBodyParameter_HeaderParameterSubSchema + // *NonBodyParameter_FormDataParameterSubSchema + // *NonBodyParameter_QueryParameterSubSchema + // *NonBodyParameter_PathParameterSubSchema + Oneof isNonBodyParameter_Oneof `protobuf_oneof:"oneof"` +} + +func (x *NonBodyParameter) Reset() { + *x = NonBodyParameter{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NonBodyParameter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NonBodyParameter) ProtoMessage() {} + +func (x *NonBodyParameter) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[30] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NonBodyParameter.ProtoReflect.Descriptor instead. 
+func (*NonBodyParameter) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{30} +} + +func (m *NonBodyParameter) GetOneof() isNonBodyParameter_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (x *NonBodyParameter) GetHeaderParameterSubSchema() *HeaderParameterSubSchema { + if x, ok := x.GetOneof().(*NonBodyParameter_HeaderParameterSubSchema); ok { + return x.HeaderParameterSubSchema + } + return nil +} + +func (x *NonBodyParameter) GetFormDataParameterSubSchema() *FormDataParameterSubSchema { + if x, ok := x.GetOneof().(*NonBodyParameter_FormDataParameterSubSchema); ok { + return x.FormDataParameterSubSchema + } + return nil +} + +func (x *NonBodyParameter) GetQueryParameterSubSchema() *QueryParameterSubSchema { + if x, ok := x.GetOneof().(*NonBodyParameter_QueryParameterSubSchema); ok { + return x.QueryParameterSubSchema + } + return nil +} + +func (x *NonBodyParameter) GetPathParameterSubSchema() *PathParameterSubSchema { + if x, ok := x.GetOneof().(*NonBodyParameter_PathParameterSubSchema); ok { + return x.PathParameterSubSchema + } + return nil +} + +type isNonBodyParameter_Oneof interface { + isNonBodyParameter_Oneof() +} + +type NonBodyParameter_HeaderParameterSubSchema struct { + HeaderParameterSubSchema *HeaderParameterSubSchema `protobuf:"bytes,1,opt,name=header_parameter_sub_schema,json=headerParameterSubSchema,proto3,oneof"` +} + +type NonBodyParameter_FormDataParameterSubSchema struct { + FormDataParameterSubSchema *FormDataParameterSubSchema `protobuf:"bytes,2,opt,name=form_data_parameter_sub_schema,json=formDataParameterSubSchema,proto3,oneof"` +} + +type NonBodyParameter_QueryParameterSubSchema struct { + QueryParameterSubSchema *QueryParameterSubSchema `protobuf:"bytes,3,opt,name=query_parameter_sub_schema,json=queryParameterSubSchema,proto3,oneof"` +} + +type NonBodyParameter_PathParameterSubSchema struct { + PathParameterSubSchema *PathParameterSubSchema `protobuf:"bytes,4,opt,name=path_parameter_sub_schema,json=pathParameterSubSchema,proto3,oneof"` +} + +func (*NonBodyParameter_HeaderParameterSubSchema) isNonBodyParameter_Oneof() {} + +func (*NonBodyParameter_FormDataParameterSubSchema) isNonBodyParameter_Oneof() {} + +func (*NonBodyParameter_QueryParameterSubSchema) isNonBodyParameter_Oneof() {} + +func (*NonBodyParameter_PathParameterSubSchema) isNonBodyParameter_Oneof() {} + +type Oauth2AccessCodeSecurity struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + Flow string `protobuf:"bytes,2,opt,name=flow,proto3" json:"flow,omitempty"` + Scopes *Oauth2Scopes `protobuf:"bytes,3,opt,name=scopes,proto3" json:"scopes,omitempty"` + AuthorizationUrl string `protobuf:"bytes,4,opt,name=authorization_url,json=authorizationUrl,proto3" json:"authorization_url,omitempty"` + TokenUrl string `protobuf:"bytes,5,opt,name=token_url,json=tokenUrl,proto3" json:"token_url,omitempty"` + Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,7,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Oauth2AccessCodeSecurity) Reset() { + *x = Oauth2AccessCodeSecurity{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x 
*Oauth2AccessCodeSecurity) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Oauth2AccessCodeSecurity) ProtoMessage() {} + +func (x *Oauth2AccessCodeSecurity) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[31] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Oauth2AccessCodeSecurity.ProtoReflect.Descriptor instead. +func (*Oauth2AccessCodeSecurity) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{31} +} + +func (x *Oauth2AccessCodeSecurity) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *Oauth2AccessCodeSecurity) GetFlow() string { + if x != nil { + return x.Flow + } + return "" +} + +func (x *Oauth2AccessCodeSecurity) GetScopes() *Oauth2Scopes { + if x != nil { + return x.Scopes + } + return nil +} + +func (x *Oauth2AccessCodeSecurity) GetAuthorizationUrl() string { + if x != nil { + return x.AuthorizationUrl + } + return "" +} + +func (x *Oauth2AccessCodeSecurity) GetTokenUrl() string { + if x != nil { + return x.TokenUrl + } + return "" +} + +func (x *Oauth2AccessCodeSecurity) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Oauth2AccessCodeSecurity) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type Oauth2ApplicationSecurity struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + Flow string `protobuf:"bytes,2,opt,name=flow,proto3" json:"flow,omitempty"` + Scopes *Oauth2Scopes `protobuf:"bytes,3,opt,name=scopes,proto3" json:"scopes,omitempty"` + TokenUrl string `protobuf:"bytes,4,opt,name=token_url,json=tokenUrl,proto3" json:"token_url,omitempty"` + Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Oauth2ApplicationSecurity) Reset() { + *x = Oauth2ApplicationSecurity{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Oauth2ApplicationSecurity) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Oauth2ApplicationSecurity) ProtoMessage() {} + +func (x *Oauth2ApplicationSecurity) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[32] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Oauth2ApplicationSecurity.ProtoReflect.Descriptor instead. 
+func (*Oauth2ApplicationSecurity) Descriptor() ([]byte, []int) {
+	return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{32}
+}
+
+func (x *Oauth2ApplicationSecurity) GetType() string {
+	if x != nil {
+		return x.Type
+	}
+	return ""
+}
+
+func (x *Oauth2ApplicationSecurity) GetFlow() string {
+	if x != nil {
+		return x.Flow
+	}
+	return ""
+}
+
+func (x *Oauth2ApplicationSecurity) GetScopes() *Oauth2Scopes {
+	if x != nil {
+		return x.Scopes
+	}
+	return nil
+}
+
+func (x *Oauth2ApplicationSecurity) GetTokenUrl() string {
+	if x != nil {
+		return x.TokenUrl
+	}
+	return ""
+}
+
+func (x *Oauth2ApplicationSecurity) GetDescription() string {
+	if x != nil {
+		return x.Description
+	}
+	return ""
+}
+
+func (x *Oauth2ApplicationSecurity) GetVendorExtension() []*NamedAny {
+	if x != nil {
+		return x.VendorExtension
+	}
+	return nil
+}
+
+type Oauth2ImplicitSecurity struct {
+	state protoimpl.MessageState
+	sizeCache protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
+	Flow string `protobuf:"bytes,2,opt,name=flow,proto3" json:"flow,omitempty"`
+	Scopes *Oauth2Scopes `protobuf:"bytes,3,opt,name=scopes,proto3" json:"scopes,omitempty"`
+	AuthorizationUrl string `protobuf:"bytes,4,opt,name=authorization_url,json=authorizationUrl,proto3" json:"authorization_url,omitempty"`
+	Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"`
+	VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"`
+}
+
+func (x *Oauth2ImplicitSecurity) Reset() {
+	*x = Oauth2ImplicitSecurity{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[33]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Oauth2ImplicitSecurity) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Oauth2ImplicitSecurity) ProtoMessage() {}
+
+func (x *Oauth2ImplicitSecurity) ProtoReflect() protoreflect.Message {
+	mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[33]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Oauth2ImplicitSecurity.ProtoReflect.Descriptor instead.
+func (*Oauth2ImplicitSecurity) Descriptor() ([]byte, []int) {
+	return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{33}
+}
+
+func (x *Oauth2ImplicitSecurity) GetType() string {
+	if x != nil {
+		return x.Type
+	}
+	return ""
+}
+
+func (x *Oauth2ImplicitSecurity) GetFlow() string {
+	if x != nil {
+		return x.Flow
+	}
+	return ""
+}
+
+func (x *Oauth2ImplicitSecurity) GetScopes() *Oauth2Scopes {
+	if x != nil {
+		return x.Scopes
+	}
+	return nil
+}
+
+func (x *Oauth2ImplicitSecurity) GetAuthorizationUrl() string {
+	if x != nil {
+		return x.AuthorizationUrl
+	}
+	return ""
+}
+
+func (x *Oauth2ImplicitSecurity) GetDescription() string {
+	if x != nil {
+		return x.Description
+	}
+	return ""
+}
+
+func (x *Oauth2ImplicitSecurity) GetVendorExtension() []*NamedAny {
+	if x != nil {
+		return x.VendorExtension
+	}
+	return nil
+}
+
+type Oauth2PasswordSecurity struct {
+	state protoimpl.MessageState
+	sizeCache protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
+	Flow string `protobuf:"bytes,2,opt,name=flow,proto3" json:"flow,omitempty"`
+	Scopes *Oauth2Scopes `protobuf:"bytes,3,opt,name=scopes,proto3" json:"scopes,omitempty"`
+	TokenUrl string `protobuf:"bytes,4,opt,name=token_url,json=tokenUrl,proto3" json:"token_url,omitempty"`
+	Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"`
+	VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"`
+}
+
+func (x *Oauth2PasswordSecurity) Reset() {
+	*x = Oauth2PasswordSecurity{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[34]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Oauth2PasswordSecurity) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Oauth2PasswordSecurity) ProtoMessage() {}
+
+func (x *Oauth2PasswordSecurity) ProtoReflect() protoreflect.Message {
+	mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[34]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Oauth2PasswordSecurity.ProtoReflect.Descriptor instead.
+func (*Oauth2PasswordSecurity) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{34} +} + +func (x *Oauth2PasswordSecurity) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *Oauth2PasswordSecurity) GetFlow() string { + if x != nil { + return x.Flow + } + return "" +} + +func (x *Oauth2PasswordSecurity) GetScopes() *Oauth2Scopes { + if x != nil { + return x.Scopes + } + return nil +} + +func (x *Oauth2PasswordSecurity) GetTokenUrl() string { + if x != nil { + return x.TokenUrl + } + return "" +} + +func (x *Oauth2PasswordSecurity) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Oauth2PasswordSecurity) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type Oauth2Scopes struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedString `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *Oauth2Scopes) Reset() { + *x = Oauth2Scopes{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[35] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Oauth2Scopes) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Oauth2Scopes) ProtoMessage() {} + +func (x *Oauth2Scopes) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[35] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Oauth2Scopes.ProtoReflect.Descriptor instead. +func (*Oauth2Scopes) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{35} +} + +func (x *Oauth2Scopes) GetAdditionalProperties() []*NamedString { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +type Operation struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tags []string `protobuf:"bytes,1,rep,name=tags,proto3" json:"tags,omitempty"` + // A brief summary of the operation. + Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"` + // A longer description of the operation, GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,4,opt,name=external_docs,json=externalDocs,proto3" json:"external_docs,omitempty"` + // A unique identifier of the operation. + OperationId string `protobuf:"bytes,5,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // A list of MIME types the API can produce. + Produces []string `protobuf:"bytes,6,rep,name=produces,proto3" json:"produces,omitempty"` + // A list of MIME types the API can consume. + Consumes []string `protobuf:"bytes,7,rep,name=consumes,proto3" json:"consumes,omitempty"` + // The parameters needed to send a valid API call. + Parameters []*ParametersItem `protobuf:"bytes,8,rep,name=parameters,proto3" json:"parameters,omitempty"` + Responses *Responses `protobuf:"bytes,9,opt,name=responses,proto3" json:"responses,omitempty"` + // The transfer protocol of the API. 
+ Schemes []string `protobuf:"bytes,10,rep,name=schemes,proto3" json:"schemes,omitempty"` + Deprecated bool `protobuf:"varint,11,opt,name=deprecated,proto3" json:"deprecated,omitempty"` + Security []*SecurityRequirement `protobuf:"bytes,12,rep,name=security,proto3" json:"security,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,13,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Operation) Reset() { + *x = Operation{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[36] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Operation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Operation) ProtoMessage() {} + +func (x *Operation) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[36] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Operation.ProtoReflect.Descriptor instead. +func (*Operation) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{36} +} + +func (x *Operation) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +func (x *Operation) GetSummary() string { + if x != nil { + return x.Summary + } + return "" +} + +func (x *Operation) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Operation) GetExternalDocs() *ExternalDocs { + if x != nil { + return x.ExternalDocs + } + return nil +} + +func (x *Operation) GetOperationId() string { + if x != nil { + return x.OperationId + } + return "" +} + +func (x *Operation) GetProduces() []string { + if x != nil { + return x.Produces + } + return nil +} + +func (x *Operation) GetConsumes() []string { + if x != nil { + return x.Consumes + } + return nil +} + +func (x *Operation) GetParameters() []*ParametersItem { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *Operation) GetResponses() *Responses { + if x != nil { + return x.Responses + } + return nil +} + +func (x *Operation) GetSchemes() []string { + if x != nil { + return x.Schemes + } + return nil +} + +func (x *Operation) GetDeprecated() bool { + if x != nil { + return x.Deprecated + } + return false +} + +func (x *Operation) GetSecurity() []*SecurityRequirement { + if x != nil { + return x.Security + } + return nil +} + +func (x *Operation) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type Parameter struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Oneof: + // *Parameter_BodyParameter + // *Parameter_NonBodyParameter + Oneof isParameter_Oneof `protobuf_oneof:"oneof"` +} + +func (x *Parameter) Reset() { + *x = Parameter{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Parameter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Parameter) ProtoMessage() {} + +func (x *Parameter) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[37] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if 
ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Parameter.ProtoReflect.Descriptor instead. +func (*Parameter) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{37} +} + +func (m *Parameter) GetOneof() isParameter_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (x *Parameter) GetBodyParameter() *BodyParameter { + if x, ok := x.GetOneof().(*Parameter_BodyParameter); ok { + return x.BodyParameter + } + return nil +} + +func (x *Parameter) GetNonBodyParameter() *NonBodyParameter { + if x, ok := x.GetOneof().(*Parameter_NonBodyParameter); ok { + return x.NonBodyParameter + } + return nil +} + +type isParameter_Oneof interface { + isParameter_Oneof() +} + +type Parameter_BodyParameter struct { + BodyParameter *BodyParameter `protobuf:"bytes,1,opt,name=body_parameter,json=bodyParameter,proto3,oneof"` +} + +type Parameter_NonBodyParameter struct { + NonBodyParameter *NonBodyParameter `protobuf:"bytes,2,opt,name=non_body_parameter,json=nonBodyParameter,proto3,oneof"` +} + +func (*Parameter_BodyParameter) isParameter_Oneof() {} + +func (*Parameter_NonBodyParameter) isParameter_Oneof() {} + +// One or more JSON representations for parameters +type ParameterDefinitions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedParameter `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *ParameterDefinitions) Reset() { + *x = ParameterDefinitions{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[38] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ParameterDefinitions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ParameterDefinitions) ProtoMessage() {} + +func (x *ParameterDefinitions) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[38] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ParameterDefinitions.ProtoReflect.Descriptor instead. 
+func (*ParameterDefinitions) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{38} +} + +func (x *ParameterDefinitions) GetAdditionalProperties() []*NamedParameter { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +type ParametersItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Oneof: + // *ParametersItem_Parameter + // *ParametersItem_JsonReference + Oneof isParametersItem_Oneof `protobuf_oneof:"oneof"` +} + +func (x *ParametersItem) Reset() { + *x = ParametersItem{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[39] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ParametersItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ParametersItem) ProtoMessage() {} + +func (x *ParametersItem) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[39] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ParametersItem.ProtoReflect.Descriptor instead. +func (*ParametersItem) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{39} +} + +func (m *ParametersItem) GetOneof() isParametersItem_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (x *ParametersItem) GetParameter() *Parameter { + if x, ok := x.GetOneof().(*ParametersItem_Parameter); ok { + return x.Parameter + } + return nil +} + +func (x *ParametersItem) GetJsonReference() *JsonReference { + if x, ok := x.GetOneof().(*ParametersItem_JsonReference); ok { + return x.JsonReference + } + return nil +} + +type isParametersItem_Oneof interface { + isParametersItem_Oneof() +} + +type ParametersItem_Parameter struct { + Parameter *Parameter `protobuf:"bytes,1,opt,name=parameter,proto3,oneof"` +} + +type ParametersItem_JsonReference struct { + JsonReference *JsonReference `protobuf:"bytes,2,opt,name=json_reference,json=jsonReference,proto3,oneof"` +} + +func (*ParametersItem_Parameter) isParametersItem_Oneof() {} + +func (*ParametersItem_JsonReference) isParametersItem_Oneof() {} + +type PathItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref,proto3" json:"_ref,omitempty"` + Get *Operation `protobuf:"bytes,2,opt,name=get,proto3" json:"get,omitempty"` + Put *Operation `protobuf:"bytes,3,opt,name=put,proto3" json:"put,omitempty"` + Post *Operation `protobuf:"bytes,4,opt,name=post,proto3" json:"post,omitempty"` + Delete *Operation `protobuf:"bytes,5,opt,name=delete,proto3" json:"delete,omitempty"` + Options *Operation `protobuf:"bytes,6,opt,name=options,proto3" json:"options,omitempty"` + Head *Operation `protobuf:"bytes,7,opt,name=head,proto3" json:"head,omitempty"` + Patch *Operation `protobuf:"bytes,8,opt,name=patch,proto3" json:"patch,omitempty"` + // The parameters needed to send a valid API call. 
+ Parameters []*ParametersItem `protobuf:"bytes,9,rep,name=parameters,proto3" json:"parameters,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,10,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *PathItem) Reset() { + *x = PathItem{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[40] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PathItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PathItem) ProtoMessage() {} + +func (x *PathItem) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[40] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PathItem.ProtoReflect.Descriptor instead. +func (*PathItem) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{40} +} + +func (x *PathItem) GetXRef() string { + if x != nil { + return x.XRef + } + return "" +} + +func (x *PathItem) GetGet() *Operation { + if x != nil { + return x.Get + } + return nil +} + +func (x *PathItem) GetPut() *Operation { + if x != nil { + return x.Put + } + return nil +} + +func (x *PathItem) GetPost() *Operation { + if x != nil { + return x.Post + } + return nil +} + +func (x *PathItem) GetDelete() *Operation { + if x != nil { + return x.Delete + } + return nil +} + +func (x *PathItem) GetOptions() *Operation { + if x != nil { + return x.Options + } + return nil +} + +func (x *PathItem) GetHead() *Operation { + if x != nil { + return x.Head + } + return nil +} + +func (x *PathItem) GetPatch() *Operation { + if x != nil { + return x.Patch + } + return nil +} + +func (x *PathItem) GetParameters() []*ParametersItem { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *PathItem) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type PathParameterSubSchema struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Determines whether or not this parameter is required or optional. + Required bool `protobuf:"varint,1,opt,name=required,proto3" json:"required,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,2,opt,name=in,proto3" json:"in,omitempty"` + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The name of the parameter. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + Type string `protobuf:"bytes,5,opt,name=type,proto3" json:"type,omitempty"` + Format string `protobuf:"bytes,6,opt,name=format,proto3" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,7,opt,name=items,proto3" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,8,opt,name=collection_format,json=collectionFormat,proto3" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,9,opt,name=default,proto3" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,10,opt,name=maximum,proto3" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,11,opt,name=exclusive_maximum,json=exclusiveMaximum,proto3" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,12,opt,name=minimum,proto3" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,13,opt,name=exclusive_minimum,json=exclusiveMinimum,proto3" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,14,opt,name=max_length,json=maxLength,proto3" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,15,opt,name=min_length,json=minLength,proto3" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,16,opt,name=pattern,proto3" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,17,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,18,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,19,opt,name=unique_items,json=uniqueItems,proto3" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,20,rep,name=enum,proto3" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,21,opt,name=multiple_of,json=multipleOf,proto3" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,22,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *PathParameterSubSchema) Reset() { + *x = PathParameterSubSchema{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[41] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PathParameterSubSchema) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PathParameterSubSchema) ProtoMessage() {} + +func (x *PathParameterSubSchema) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[41] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PathParameterSubSchema.ProtoReflect.Descriptor instead. 
+func (*PathParameterSubSchema) Descriptor() ([]byte, []int) {
+	return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{41}
+}
+
+func (x *PathParameterSubSchema) GetRequired() bool {
+	if x != nil {
+		return x.Required
+	}
+	return false
+}
+
+func (x *PathParameterSubSchema) GetIn() string {
+	if x != nil {
+		return x.In
+	}
+	return ""
+}
+
+func (x *PathParameterSubSchema) GetDescription() string {
+	if x != nil {
+		return x.Description
+	}
+	return ""
+}
+
+func (x *PathParameterSubSchema) GetName() string {
+	if x != nil {
+		return x.Name
+	}
+	return ""
+}
+
+func (x *PathParameterSubSchema) GetType() string {
+	if x != nil {
+		return x.Type
+	}
+	return ""
+}
+
+func (x *PathParameterSubSchema) GetFormat() string {
+	if x != nil {
+		return x.Format
+	}
+	return ""
+}
+
+func (x *PathParameterSubSchema) GetItems() *PrimitivesItems {
+	if x != nil {
+		return x.Items
+	}
+	return nil
+}
+
+func (x *PathParameterSubSchema) GetCollectionFormat() string {
+	if x != nil {
+		return x.CollectionFormat
+	}
+	return ""
+}
+
+func (x *PathParameterSubSchema) GetDefault() *Any {
+	if x != nil {
+		return x.Default
+	}
+	return nil
+}
+
+func (x *PathParameterSubSchema) GetMaximum() float64 {
+	if x != nil {
+		return x.Maximum
+	}
+	return 0
+}
+
+func (x *PathParameterSubSchema) GetExclusiveMaximum() bool {
+	if x != nil {
+		return x.ExclusiveMaximum
+	}
+	return false
+}
+
+func (x *PathParameterSubSchema) GetMinimum() float64 {
+	if x != nil {
+		return x.Minimum
+	}
+	return 0
+}
+
+func (x *PathParameterSubSchema) GetExclusiveMinimum() bool {
+	if x != nil {
+		return x.ExclusiveMinimum
+	}
+	return false
+}
+
+func (x *PathParameterSubSchema) GetMaxLength() int64 {
+	if x != nil {
+		return x.MaxLength
+	}
+	return 0
+}
+
+func (x *PathParameterSubSchema) GetMinLength() int64 {
+	if x != nil {
+		return x.MinLength
+	}
+	return 0
+}
+
+func (x *PathParameterSubSchema) GetPattern() string {
+	if x != nil {
+		return x.Pattern
+	}
+	return ""
+}
+
+func (x *PathParameterSubSchema) GetMaxItems() int64 {
+	if x != nil {
+		return x.MaxItems
+	}
+	return 0
+}
+
+func (x *PathParameterSubSchema) GetMinItems() int64 {
+	if x != nil {
+		return x.MinItems
+	}
+	return 0
+}
+
+func (x *PathParameterSubSchema) GetUniqueItems() bool {
+	if x != nil {
+		return x.UniqueItems
+	}
+	return false
+}
+
+func (x *PathParameterSubSchema) GetEnum() []*Any {
+	if x != nil {
+		return x.Enum
+	}
+	return nil
+}
+
+func (x *PathParameterSubSchema) GetMultipleOf() float64 {
+	if x != nil {
+		return x.MultipleOf
+	}
+	return 0
+}
+
+func (x *PathParameterSubSchema) GetVendorExtension() []*NamedAny {
+	if x != nil {
+		return x.VendorExtension
+	}
+	return nil
+}
+
+// Relative paths to the individual endpoints. They must be relative to the 'basePath'.
+type Paths struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + VendorExtension []*NamedAny `protobuf:"bytes,1,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` + Path []*NamedPathItem `protobuf:"bytes,2,rep,name=path,proto3" json:"path,omitempty"` +} + +func (x *Paths) Reset() { + *x = Paths{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[42] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Paths) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Paths) ProtoMessage() {} + +func (x *Paths) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[42] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Paths.ProtoReflect.Descriptor instead. +func (*Paths) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{42} +} + +func (x *Paths) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +func (x *Paths) GetPath() []*NamedPathItem { + if x != nil { + return x.Path + } + return nil +} + +type PrimitivesItems struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + Format string `protobuf:"bytes,2,opt,name=format,proto3" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,3,opt,name=items,proto3" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,4,opt,name=collection_format,json=collectionFormat,proto3" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,5,opt,name=default,proto3" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,6,opt,name=maximum,proto3" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,7,opt,name=exclusive_maximum,json=exclusiveMaximum,proto3" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,8,opt,name=minimum,proto3" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,9,opt,name=exclusive_minimum,json=exclusiveMinimum,proto3" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,10,opt,name=max_length,json=maxLength,proto3" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,11,opt,name=min_length,json=minLength,proto3" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,12,opt,name=pattern,proto3" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,13,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,14,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,15,opt,name=unique_items,json=uniqueItems,proto3" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,16,rep,name=enum,proto3" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,17,opt,name=multiple_of,json=multipleOf,proto3" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,18,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *PrimitivesItems) Reset() { + *x = PrimitivesItems{} + if protoimpl.UnsafeEnabled { + mi := 
&file_openapiv2_OpenAPIv2_proto_msgTypes[43] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PrimitivesItems) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PrimitivesItems) ProtoMessage() {} + +func (x *PrimitivesItems) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[43] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PrimitivesItems.ProtoReflect.Descriptor instead. +func (*PrimitivesItems) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{43} +} + +func (x *PrimitivesItems) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *PrimitivesItems) GetFormat() string { + if x != nil { + return x.Format + } + return "" +} + +func (x *PrimitivesItems) GetItems() *PrimitivesItems { + if x != nil { + return x.Items + } + return nil +} + +func (x *PrimitivesItems) GetCollectionFormat() string { + if x != nil { + return x.CollectionFormat + } + return "" +} + +func (x *PrimitivesItems) GetDefault() *Any { + if x != nil { + return x.Default + } + return nil +} + +func (x *PrimitivesItems) GetMaximum() float64 { + if x != nil { + return x.Maximum + } + return 0 +} + +func (x *PrimitivesItems) GetExclusiveMaximum() bool { + if x != nil { + return x.ExclusiveMaximum + } + return false +} + +func (x *PrimitivesItems) GetMinimum() float64 { + if x != nil { + return x.Minimum + } + return 0 +} + +func (x *PrimitivesItems) GetExclusiveMinimum() bool { + if x != nil { + return x.ExclusiveMinimum + } + return false +} + +func (x *PrimitivesItems) GetMaxLength() int64 { + if x != nil { + return x.MaxLength + } + return 0 +} + +func (x *PrimitivesItems) GetMinLength() int64 { + if x != nil { + return x.MinLength + } + return 0 +} + +func (x *PrimitivesItems) GetPattern() string { + if x != nil { + return x.Pattern + } + return "" +} + +func (x *PrimitivesItems) GetMaxItems() int64 { + if x != nil { + return x.MaxItems + } + return 0 +} + +func (x *PrimitivesItems) GetMinItems() int64 { + if x != nil { + return x.MinItems + } + return 0 +} + +func (x *PrimitivesItems) GetUniqueItems() bool { + if x != nil { + return x.UniqueItems + } + return false +} + +func (x *PrimitivesItems) GetEnum() []*Any { + if x != nil { + return x.Enum + } + return nil +} + +func (x *PrimitivesItems) GetMultipleOf() float64 { + if x != nil { + return x.MultipleOf + } + return 0 +} + +func (x *PrimitivesItems) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type Properties struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedSchema `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *Properties) Reset() { + *x = Properties{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[44] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Properties) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Properties) ProtoMessage() {} + +func (x *Properties) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[44] + if 
protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Properties.ProtoReflect.Descriptor instead. +func (*Properties) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{44} +} + +func (x *Properties) GetAdditionalProperties() []*NamedSchema { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +type QueryParameterSubSchema struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Determines whether or not this parameter is required or optional. + Required bool `protobuf:"varint,1,opt,name=required,proto3" json:"required,omitempty"` + // Determines the location of the parameter. + In string `protobuf:"bytes,2,opt,name=in,proto3" json:"in,omitempty"` + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The name of the parameter. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // allows sending a parameter by name only or with an empty value. + AllowEmptyValue bool `protobuf:"varint,5,opt,name=allow_empty_value,json=allowEmptyValue,proto3" json:"allow_empty_value,omitempty"` + Type string `protobuf:"bytes,6,opt,name=type,proto3" json:"type,omitempty"` + Format string `protobuf:"bytes,7,opt,name=format,proto3" json:"format,omitempty"` + Items *PrimitivesItems `protobuf:"bytes,8,opt,name=items,proto3" json:"items,omitempty"` + CollectionFormat string `protobuf:"bytes,9,opt,name=collection_format,json=collectionFormat,proto3" json:"collection_format,omitempty"` + Default *Any `protobuf:"bytes,10,opt,name=default,proto3" json:"default,omitempty"` + Maximum float64 `protobuf:"fixed64,11,opt,name=maximum,proto3" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,12,opt,name=exclusive_maximum,json=exclusiveMaximum,proto3" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,13,opt,name=minimum,proto3" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,14,opt,name=exclusive_minimum,json=exclusiveMinimum,proto3" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,15,opt,name=max_length,json=maxLength,proto3" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,16,opt,name=min_length,json=minLength,proto3" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,17,opt,name=pattern,proto3" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,18,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,19,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,20,opt,name=unique_items,json=uniqueItems,proto3" json:"unique_items,omitempty"` + Enum []*Any `protobuf:"bytes,21,rep,name=enum,proto3" json:"enum,omitempty"` + MultipleOf float64 `protobuf:"fixed64,22,opt,name=multiple_of,json=multipleOf,proto3" json:"multiple_of,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,23,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *QueryParameterSubSchema) Reset() { + *x = QueryParameterSubSchema{} + if protoimpl.UnsafeEnabled { + mi := 
&file_openapiv2_OpenAPIv2_proto_msgTypes[45] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *QueryParameterSubSchema) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*QueryParameterSubSchema) ProtoMessage() {} + +func (x *QueryParameterSubSchema) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[45] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use QueryParameterSubSchema.ProtoReflect.Descriptor instead. +func (*QueryParameterSubSchema) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{45} +} + +func (x *QueryParameterSubSchema) GetRequired() bool { + if x != nil { + return x.Required + } + return false +} + +func (x *QueryParameterSubSchema) GetIn() string { + if x != nil { + return x.In + } + return "" +} + +func (x *QueryParameterSubSchema) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *QueryParameterSubSchema) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *QueryParameterSubSchema) GetAllowEmptyValue() bool { + if x != nil { + return x.AllowEmptyValue + } + return false +} + +func (x *QueryParameterSubSchema) GetType() string { + if x != nil { + return x.Type + } + return "" +} + +func (x *QueryParameterSubSchema) GetFormat() string { + if x != nil { + return x.Format + } + return "" +} + +func (x *QueryParameterSubSchema) GetItems() *PrimitivesItems { + if x != nil { + return x.Items + } + return nil +} + +func (x *QueryParameterSubSchema) GetCollectionFormat() string { + if x != nil { + return x.CollectionFormat + } + return "" +} + +func (x *QueryParameterSubSchema) GetDefault() *Any { + if x != nil { + return x.Default + } + return nil +} + +func (x *QueryParameterSubSchema) GetMaximum() float64 { + if x != nil { + return x.Maximum + } + return 0 +} + +func (x *QueryParameterSubSchema) GetExclusiveMaximum() bool { + if x != nil { + return x.ExclusiveMaximum + } + return false +} + +func (x *QueryParameterSubSchema) GetMinimum() float64 { + if x != nil { + return x.Minimum + } + return 0 +} + +func (x *QueryParameterSubSchema) GetExclusiveMinimum() bool { + if x != nil { + return x.ExclusiveMinimum + } + return false +} + +func (x *QueryParameterSubSchema) GetMaxLength() int64 { + if x != nil { + return x.MaxLength + } + return 0 +} + +func (x *QueryParameterSubSchema) GetMinLength() int64 { + if x != nil { + return x.MinLength + } + return 0 +} + +func (x *QueryParameterSubSchema) GetPattern() string { + if x != nil { + return x.Pattern + } + return "" +} + +func (x *QueryParameterSubSchema) GetMaxItems() int64 { + if x != nil { + return x.MaxItems + } + return 0 +} + +func (x *QueryParameterSubSchema) GetMinItems() int64 { + if x != nil { + return x.MinItems + } + return 0 +} + +func (x *QueryParameterSubSchema) GetUniqueItems() bool { + if x != nil { + return x.UniqueItems + } + return false +} + +func (x *QueryParameterSubSchema) GetEnum() []*Any { + if x != nil { + return x.Enum + } + return nil +} + +func (x *QueryParameterSubSchema) GetMultipleOf() float64 { + if x != nil { + return x.MultipleOf + } + return 0 +} + +func (x *QueryParameterSubSchema) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type Response 
struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + Schema *SchemaItem `protobuf:"bytes,2,opt,name=schema,proto3" json:"schema,omitempty"` + Headers *Headers `protobuf:"bytes,3,opt,name=headers,proto3" json:"headers,omitempty"` + Examples *Examples `protobuf:"bytes,4,opt,name=examples,proto3" json:"examples,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,5,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Response) Reset() { + *x = Response{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[46] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Response) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Response) ProtoMessage() {} + +func (x *Response) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[46] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Response.ProtoReflect.Descriptor instead. +func (*Response) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{46} +} + +func (x *Response) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Response) GetSchema() *SchemaItem { + if x != nil { + return x.Schema + } + return nil +} + +func (x *Response) GetHeaders() *Headers { + if x != nil { + return x.Headers + } + return nil +} + +func (x *Response) GetExamples() *Examples { + if x != nil { + return x.Examples + } + return nil +} + +func (x *Response) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +// One or more JSON representations for responses +type ResponseDefinitions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedResponse `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *ResponseDefinitions) Reset() { + *x = ResponseDefinitions{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[47] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ResponseDefinitions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ResponseDefinitions) ProtoMessage() {} + +func (x *ResponseDefinitions) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[47] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ResponseDefinitions.ProtoReflect.Descriptor instead. 
+func (*ResponseDefinitions) Descriptor() ([]byte, []int) {
+	return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{47}
+}
+
+func (x *ResponseDefinitions) GetAdditionalProperties() []*NamedResponse {
+	if x != nil {
+		return x.AdditionalProperties
+	}
+	return nil
+}
+
+type ResponseValue struct {
+	state protoimpl.MessageState
+	sizeCache protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Types that are assignable to Oneof:
+	// *ResponseValue_Response
+	// *ResponseValue_JsonReference
+	Oneof isResponseValue_Oneof `protobuf_oneof:"oneof"`
+}
+
+func (x *ResponseValue) Reset() {
+	*x = ResponseValue{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[48]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *ResponseValue) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ResponseValue) ProtoMessage() {}
+
+func (x *ResponseValue) ProtoReflect() protoreflect.Message {
+	mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[48]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use ResponseValue.ProtoReflect.Descriptor instead.
+func (*ResponseValue) Descriptor() ([]byte, []int) {
+	return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{48}
+}
+
+func (m *ResponseValue) GetOneof() isResponseValue_Oneof {
+	if m != nil {
+		return m.Oneof
+	}
+	return nil
+}
+
+func (x *ResponseValue) GetResponse() *Response {
+	if x, ok := x.GetOneof().(*ResponseValue_Response); ok {
+		return x.Response
+	}
+	return nil
+}
+
+func (x *ResponseValue) GetJsonReference() *JsonReference {
+	if x, ok := x.GetOneof().(*ResponseValue_JsonReference); ok {
+		return x.JsonReference
+	}
+	return nil
+}
+
+type isResponseValue_Oneof interface {
+	isResponseValue_Oneof()
+}
+
+type ResponseValue_Response struct {
+	Response *Response `protobuf:"bytes,1,opt,name=response,proto3,oneof"`
+}
+
+type ResponseValue_JsonReference struct {
+	JsonReference *JsonReference `protobuf:"bytes,2,opt,name=json_reference,json=jsonReference,proto3,oneof"`
+}
+
+func (*ResponseValue_Response) isResponseValue_Oneof() {}
+
+func (*ResponseValue_JsonReference) isResponseValue_Oneof() {}
+
+// Response objects names can either be any valid HTTP status code or 'default'.
+type Responses struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ResponseCode []*NamedResponseValue `protobuf:"bytes,1,rep,name=response_code,json=responseCode,proto3" json:"response_code,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,2,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Responses) Reset() { + *x = Responses{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[49] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Responses) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Responses) ProtoMessage() {} + +func (x *Responses) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[49] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Responses.ProtoReflect.Descriptor instead. +func (*Responses) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{49} +} + +func (x *Responses) GetResponseCode() []*NamedResponseValue { + if x != nil { + return x.ResponseCode + } + return nil +} + +func (x *Responses) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +// A deterministic version of a JSON Schema object. +type Schema struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + XRef string `protobuf:"bytes,1,opt,name=_ref,json=Ref,proto3" json:"_ref,omitempty"` + Format string `protobuf:"bytes,2,opt,name=format,proto3" json:"format,omitempty"` + Title string `protobuf:"bytes,3,opt,name=title,proto3" json:"title,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + Default *Any `protobuf:"bytes,5,opt,name=default,proto3" json:"default,omitempty"` + MultipleOf float64 `protobuf:"fixed64,6,opt,name=multiple_of,json=multipleOf,proto3" json:"multiple_of,omitempty"` + Maximum float64 `protobuf:"fixed64,7,opt,name=maximum,proto3" json:"maximum,omitempty"` + ExclusiveMaximum bool `protobuf:"varint,8,opt,name=exclusive_maximum,json=exclusiveMaximum,proto3" json:"exclusive_maximum,omitempty"` + Minimum float64 `protobuf:"fixed64,9,opt,name=minimum,proto3" json:"minimum,omitempty"` + ExclusiveMinimum bool `protobuf:"varint,10,opt,name=exclusive_minimum,json=exclusiveMinimum,proto3" json:"exclusive_minimum,omitempty"` + MaxLength int64 `protobuf:"varint,11,opt,name=max_length,json=maxLength,proto3" json:"max_length,omitempty"` + MinLength int64 `protobuf:"varint,12,opt,name=min_length,json=minLength,proto3" json:"min_length,omitempty"` + Pattern string `protobuf:"bytes,13,opt,name=pattern,proto3" json:"pattern,omitempty"` + MaxItems int64 `protobuf:"varint,14,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"` + MinItems int64 `protobuf:"varint,15,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"` + UniqueItems bool `protobuf:"varint,16,opt,name=unique_items,json=uniqueItems,proto3" json:"unique_items,omitempty"` + MaxProperties int64 `protobuf:"varint,17,opt,name=max_properties,json=maxProperties,proto3" json:"max_properties,omitempty"` + MinProperties int64 
`protobuf:"varint,18,opt,name=min_properties,json=minProperties,proto3" json:"min_properties,omitempty"` + Required []string `protobuf:"bytes,19,rep,name=required,proto3" json:"required,omitempty"` + Enum []*Any `protobuf:"bytes,20,rep,name=enum,proto3" json:"enum,omitempty"` + AdditionalProperties *AdditionalPropertiesItem `protobuf:"bytes,21,opt,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` + Type *TypeItem `protobuf:"bytes,22,opt,name=type,proto3" json:"type,omitempty"` + Items *ItemsItem `protobuf:"bytes,23,opt,name=items,proto3" json:"items,omitempty"` + AllOf []*Schema `protobuf:"bytes,24,rep,name=all_of,json=allOf,proto3" json:"all_of,omitempty"` + Properties *Properties `protobuf:"bytes,25,opt,name=properties,proto3" json:"properties,omitempty"` + Discriminator string `protobuf:"bytes,26,opt,name=discriminator,proto3" json:"discriminator,omitempty"` + ReadOnly bool `protobuf:"varint,27,opt,name=read_only,json=readOnly,proto3" json:"read_only,omitempty"` + Xml *Xml `protobuf:"bytes,28,opt,name=xml,proto3" json:"xml,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,29,opt,name=external_docs,json=externalDocs,proto3" json:"external_docs,omitempty"` + Example *Any `protobuf:"bytes,30,opt,name=example,proto3" json:"example,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,31,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Schema) Reset() { + *x = Schema{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[50] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Schema) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Schema) ProtoMessage() {} + +func (x *Schema) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[50] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Schema.ProtoReflect.Descriptor instead. 
+func (*Schema) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{50} +} + +func (x *Schema) GetXRef() string { + if x != nil { + return x.XRef + } + return "" +} + +func (x *Schema) GetFormat() string { + if x != nil { + return x.Format + } + return "" +} + +func (x *Schema) GetTitle() string { + if x != nil { + return x.Title + } + return "" +} + +func (x *Schema) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Schema) GetDefault() *Any { + if x != nil { + return x.Default + } + return nil +} + +func (x *Schema) GetMultipleOf() float64 { + if x != nil { + return x.MultipleOf + } + return 0 +} + +func (x *Schema) GetMaximum() float64 { + if x != nil { + return x.Maximum + } + return 0 +} + +func (x *Schema) GetExclusiveMaximum() bool { + if x != nil { + return x.ExclusiveMaximum + } + return false +} + +func (x *Schema) GetMinimum() float64 { + if x != nil { + return x.Minimum + } + return 0 +} + +func (x *Schema) GetExclusiveMinimum() bool { + if x != nil { + return x.ExclusiveMinimum + } + return false +} + +func (x *Schema) GetMaxLength() int64 { + if x != nil { + return x.MaxLength + } + return 0 +} + +func (x *Schema) GetMinLength() int64 { + if x != nil { + return x.MinLength + } + return 0 +} + +func (x *Schema) GetPattern() string { + if x != nil { + return x.Pattern + } + return "" +} + +func (x *Schema) GetMaxItems() int64 { + if x != nil { + return x.MaxItems + } + return 0 +} + +func (x *Schema) GetMinItems() int64 { + if x != nil { + return x.MinItems + } + return 0 +} + +func (x *Schema) GetUniqueItems() bool { + if x != nil { + return x.UniqueItems + } + return false +} + +func (x *Schema) GetMaxProperties() int64 { + if x != nil { + return x.MaxProperties + } + return 0 +} + +func (x *Schema) GetMinProperties() int64 { + if x != nil { + return x.MinProperties + } + return 0 +} + +func (x *Schema) GetRequired() []string { + if x != nil { + return x.Required + } + return nil +} + +func (x *Schema) GetEnum() []*Any { + if x != nil { + return x.Enum + } + return nil +} + +func (x *Schema) GetAdditionalProperties() *AdditionalPropertiesItem { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +func (x *Schema) GetType() *TypeItem { + if x != nil { + return x.Type + } + return nil +} + +func (x *Schema) GetItems() *ItemsItem { + if x != nil { + return x.Items + } + return nil +} + +func (x *Schema) GetAllOf() []*Schema { + if x != nil { + return x.AllOf + } + return nil +} + +func (x *Schema) GetProperties() *Properties { + if x != nil { + return x.Properties + } + return nil +} + +func (x *Schema) GetDiscriminator() string { + if x != nil { + return x.Discriminator + } + return "" +} + +func (x *Schema) GetReadOnly() bool { + if x != nil { + return x.ReadOnly + } + return false +} + +func (x *Schema) GetXml() *Xml { + if x != nil { + return x.Xml + } + return nil +} + +func (x *Schema) GetExternalDocs() *ExternalDocs { + if x != nil { + return x.ExternalDocs + } + return nil +} + +func (x *Schema) GetExample() *Any { + if x != nil { + return x.Example + } + return nil +} + +func (x *Schema) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type SchemaItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Oneof: + // *SchemaItem_Schema + // *SchemaItem_FileSchema + Oneof isSchemaItem_Oneof `protobuf_oneof:"oneof"` +} + +func 
(x *SchemaItem) Reset() { + *x = SchemaItem{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[51] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SchemaItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SchemaItem) ProtoMessage() {} + +func (x *SchemaItem) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[51] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SchemaItem.ProtoReflect.Descriptor instead. +func (*SchemaItem) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{51} +} + +func (m *SchemaItem) GetOneof() isSchemaItem_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (x *SchemaItem) GetSchema() *Schema { + if x, ok := x.GetOneof().(*SchemaItem_Schema); ok { + return x.Schema + } + return nil +} + +func (x *SchemaItem) GetFileSchema() *FileSchema { + if x, ok := x.GetOneof().(*SchemaItem_FileSchema); ok { + return x.FileSchema + } + return nil +} + +type isSchemaItem_Oneof interface { + isSchemaItem_Oneof() +} + +type SchemaItem_Schema struct { + Schema *Schema `protobuf:"bytes,1,opt,name=schema,proto3,oneof"` +} + +type SchemaItem_FileSchema struct { + FileSchema *FileSchema `protobuf:"bytes,2,opt,name=file_schema,json=fileSchema,proto3,oneof"` +} + +func (*SchemaItem_Schema) isSchemaItem_Oneof() {} + +func (*SchemaItem_FileSchema) isSchemaItem_Oneof() {} + +type SecurityDefinitions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedSecurityDefinitionsItem `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *SecurityDefinitions) Reset() { + *x = SecurityDefinitions{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[52] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SecurityDefinitions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SecurityDefinitions) ProtoMessage() {} + +func (x *SecurityDefinitions) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[52] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SecurityDefinitions.ProtoReflect.Descriptor instead. 
+func (*SecurityDefinitions) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{52} +} + +func (x *SecurityDefinitions) GetAdditionalProperties() []*NamedSecurityDefinitionsItem { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +type SecurityDefinitionsItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Oneof: + // *SecurityDefinitionsItem_BasicAuthenticationSecurity + // *SecurityDefinitionsItem_ApiKeySecurity + // *SecurityDefinitionsItem_Oauth2ImplicitSecurity + // *SecurityDefinitionsItem_Oauth2PasswordSecurity + // *SecurityDefinitionsItem_Oauth2ApplicationSecurity + // *SecurityDefinitionsItem_Oauth2AccessCodeSecurity + Oneof isSecurityDefinitionsItem_Oneof `protobuf_oneof:"oneof"` +} + +func (x *SecurityDefinitionsItem) Reset() { + *x = SecurityDefinitionsItem{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[53] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SecurityDefinitionsItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SecurityDefinitionsItem) ProtoMessage() {} + +func (x *SecurityDefinitionsItem) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[53] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SecurityDefinitionsItem.ProtoReflect.Descriptor instead. +func (*SecurityDefinitionsItem) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{53} +} + +func (m *SecurityDefinitionsItem) GetOneof() isSecurityDefinitionsItem_Oneof { + if m != nil { + return m.Oneof + } + return nil +} + +func (x *SecurityDefinitionsItem) GetBasicAuthenticationSecurity() *BasicAuthenticationSecurity { + if x, ok := x.GetOneof().(*SecurityDefinitionsItem_BasicAuthenticationSecurity); ok { + return x.BasicAuthenticationSecurity + } + return nil +} + +func (x *SecurityDefinitionsItem) GetApiKeySecurity() *ApiKeySecurity { + if x, ok := x.GetOneof().(*SecurityDefinitionsItem_ApiKeySecurity); ok { + return x.ApiKeySecurity + } + return nil +} + +func (x *SecurityDefinitionsItem) GetOauth2ImplicitSecurity() *Oauth2ImplicitSecurity { + if x, ok := x.GetOneof().(*SecurityDefinitionsItem_Oauth2ImplicitSecurity); ok { + return x.Oauth2ImplicitSecurity + } + return nil +} + +func (x *SecurityDefinitionsItem) GetOauth2PasswordSecurity() *Oauth2PasswordSecurity { + if x, ok := x.GetOneof().(*SecurityDefinitionsItem_Oauth2PasswordSecurity); ok { + return x.Oauth2PasswordSecurity + } + return nil +} + +func (x *SecurityDefinitionsItem) GetOauth2ApplicationSecurity() *Oauth2ApplicationSecurity { + if x, ok := x.GetOneof().(*SecurityDefinitionsItem_Oauth2ApplicationSecurity); ok { + return x.Oauth2ApplicationSecurity + } + return nil +} + +func (x *SecurityDefinitionsItem) GetOauth2AccessCodeSecurity() *Oauth2AccessCodeSecurity { + if x, ok := x.GetOneof().(*SecurityDefinitionsItem_Oauth2AccessCodeSecurity); ok { + return x.Oauth2AccessCodeSecurity + } + return nil +} + +type isSecurityDefinitionsItem_Oneof interface { + isSecurityDefinitionsItem_Oneof() +} + +type SecurityDefinitionsItem_BasicAuthenticationSecurity struct { + BasicAuthenticationSecurity *BasicAuthenticationSecurity 
`protobuf:"bytes,1,opt,name=basic_authentication_security,json=basicAuthenticationSecurity,proto3,oneof"` +} + +type SecurityDefinitionsItem_ApiKeySecurity struct { + ApiKeySecurity *ApiKeySecurity `protobuf:"bytes,2,opt,name=api_key_security,json=apiKeySecurity,proto3,oneof"` +} + +type SecurityDefinitionsItem_Oauth2ImplicitSecurity struct { + Oauth2ImplicitSecurity *Oauth2ImplicitSecurity `protobuf:"bytes,3,opt,name=oauth2_implicit_security,json=oauth2ImplicitSecurity,proto3,oneof"` +} + +type SecurityDefinitionsItem_Oauth2PasswordSecurity struct { + Oauth2PasswordSecurity *Oauth2PasswordSecurity `protobuf:"bytes,4,opt,name=oauth2_password_security,json=oauth2PasswordSecurity,proto3,oneof"` +} + +type SecurityDefinitionsItem_Oauth2ApplicationSecurity struct { + Oauth2ApplicationSecurity *Oauth2ApplicationSecurity `protobuf:"bytes,5,opt,name=oauth2_application_security,json=oauth2ApplicationSecurity,proto3,oneof"` +} + +type SecurityDefinitionsItem_Oauth2AccessCodeSecurity struct { + Oauth2AccessCodeSecurity *Oauth2AccessCodeSecurity `protobuf:"bytes,6,opt,name=oauth2_access_code_security,json=oauth2AccessCodeSecurity,proto3,oneof"` +} + +func (*SecurityDefinitionsItem_BasicAuthenticationSecurity) isSecurityDefinitionsItem_Oneof() {} + +func (*SecurityDefinitionsItem_ApiKeySecurity) isSecurityDefinitionsItem_Oneof() {} + +func (*SecurityDefinitionsItem_Oauth2ImplicitSecurity) isSecurityDefinitionsItem_Oneof() {} + +func (*SecurityDefinitionsItem_Oauth2PasswordSecurity) isSecurityDefinitionsItem_Oneof() {} + +func (*SecurityDefinitionsItem_Oauth2ApplicationSecurity) isSecurityDefinitionsItem_Oneof() {} + +func (*SecurityDefinitionsItem_Oauth2AccessCodeSecurity) isSecurityDefinitionsItem_Oneof() {} + +type SecurityRequirement struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedStringArray `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *SecurityRequirement) Reset() { + *x = SecurityRequirement{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[54] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SecurityRequirement) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SecurityRequirement) ProtoMessage() {} + +func (x *SecurityRequirement) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[54] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SecurityRequirement.ProtoReflect.Descriptor instead. 
+func (*SecurityRequirement) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{54} +} + +func (x *SecurityRequirement) GetAdditionalProperties() []*NamedStringArray { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +type StringArray struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Value []string `protobuf:"bytes,1,rep,name=value,proto3" json:"value,omitempty"` +} + +func (x *StringArray) Reset() { + *x = StringArray{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[55] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *StringArray) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*StringArray) ProtoMessage() {} + +func (x *StringArray) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[55] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use StringArray.ProtoReflect.Descriptor instead. +func (*StringArray) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{55} +} + +func (x *StringArray) GetValue() []string { + if x != nil { + return x.Value + } + return nil +} + +type Tag struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + ExternalDocs *ExternalDocs `protobuf:"bytes,3,opt,name=external_docs,json=externalDocs,proto3" json:"external_docs,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,4,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Tag) Reset() { + *x = Tag{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[56] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Tag) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Tag) ProtoMessage() {} + +func (x *Tag) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[56] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Tag.ProtoReflect.Descriptor instead. 
+func (*Tag) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{56} +} + +func (x *Tag) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Tag) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Tag) GetExternalDocs() *ExternalDocs { + if x != nil { + return x.ExternalDocs + } + return nil +} + +func (x *Tag) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +type TypeItem struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Value []string `protobuf:"bytes,1,rep,name=value,proto3" json:"value,omitempty"` +} + +func (x *TypeItem) Reset() { + *x = TypeItem{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[57] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TypeItem) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TypeItem) ProtoMessage() {} + +func (x *TypeItem) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[57] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TypeItem.ProtoReflect.Descriptor instead. +func (*TypeItem) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{57} +} + +func (x *TypeItem) GetValue() []string { + if x != nil { + return x.Value + } + return nil +} + +// Any property starting with x- is valid. +type VendorExtension struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AdditionalProperties []*NamedAny `protobuf:"bytes,1,rep,name=additional_properties,json=additionalProperties,proto3" json:"additional_properties,omitempty"` +} + +func (x *VendorExtension) Reset() { + *x = VendorExtension{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[58] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VendorExtension) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VendorExtension) ProtoMessage() {} + +func (x *VendorExtension) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[58] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VendorExtension.ProtoReflect.Descriptor instead. 
+func (*VendorExtension) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{58} +} + +func (x *VendorExtension) GetAdditionalProperties() []*NamedAny { + if x != nil { + return x.AdditionalProperties + } + return nil +} + +type Xml struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` + Prefix string `protobuf:"bytes,3,opt,name=prefix,proto3" json:"prefix,omitempty"` + Attribute bool `protobuf:"varint,4,opt,name=attribute,proto3" json:"attribute,omitempty"` + Wrapped bool `protobuf:"varint,5,opt,name=wrapped,proto3" json:"wrapped,omitempty"` + VendorExtension []*NamedAny `protobuf:"bytes,6,rep,name=vendor_extension,json=vendorExtension,proto3" json:"vendor_extension,omitempty"` +} + +func (x *Xml) Reset() { + *x = Xml{} + if protoimpl.UnsafeEnabled { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[59] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Xml) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Xml) ProtoMessage() {} + +func (x *Xml) ProtoReflect() protoreflect.Message { + mi := &file_openapiv2_OpenAPIv2_proto_msgTypes[59] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Xml.ProtoReflect.Descriptor instead. +func (*Xml) Descriptor() ([]byte, []int) { + return file_openapiv2_OpenAPIv2_proto_rawDescGZIP(), []int{59} +} + +func (x *Xml) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Xml) GetNamespace() string { + if x != nil { + return x.Namespace + } + return "" +} + +func (x *Xml) GetPrefix() string { + if x != nil { + return x.Prefix + } + return "" +} + +func (x *Xml) GetAttribute() bool { + if x != nil { + return x.Attribute + } + return false +} + +func (x *Xml) GetWrapped() bool { + if x != nil { + return x.Wrapped + } + return false +} + +func (x *Xml) GetVendorExtension() []*NamedAny { + if x != nil { + return x.VendorExtension + } + return nil +} + +var File_openapiv2_OpenAPIv2_proto protoreflect.FileDescriptor + +var file_openapiv2_OpenAPIv2_proto_rawDesc = []byte{ + 0x0a, 0x19, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, 0x2f, 0x4f, 0x70, 0x65, 0x6e, + 0x41, 0x50, 0x49, 0x76, 0x32, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x22, 0x6d, 0x0a, 0x18, 0x41, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, + 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x2c, + 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, + 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x48, 0x00, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x1a, 0x0a, 0x07, + 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, + 0x07, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x42, 0x07, 0x0a, 0x05, 0x6f, 0x6e, 0x65, 
0x6f, + 0x66, 0x22, 0x45, 0x0a, 0x03, 0x41, 0x6e, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x79, 0x61, 0x6d, 0x6c, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x79, 0x61, 0x6d, 0x6c, 0x22, 0xab, 0x01, 0x0a, 0x0e, 0x41, 0x70, 0x69, + 0x4b, 0x65, 0x79, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x74, + 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x02, 0x69, 0x6e, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, + 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, + 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, + 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x94, 0x01, 0x0a, 0x1b, 0x42, 0x61, 0x73, 0x69, 0x63, + 0x41, 0x75, 0x74, 0x68, 0x65, 0x6e, 0x74, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x65, + 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x10, + 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, + 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xde, 0x01, + 0x0a, 0x0d, 0x42, 0x6f, 0x64, 0x79, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, + 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x02, 0x69, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, + 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, + 0x64, 0x12, 0x2a, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x3f, 0x0a, + 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, + 0x6e, 0x18, 0x06, 
0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, + 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x86, + 0x01, 0x0a, 0x07, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, + 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, + 0x12, 0x14, 0x0a, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, + 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, + 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, + 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x54, 0x0a, 0x07, 0x44, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x12, 0x49, 0x0a, 0x15, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, + 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, + 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x22, 0x5b, 0x0a, + 0x0b, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4c, 0x0a, 0x15, + 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, + 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, + 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x22, 0xe8, 0x05, 0x0a, 0x08, 0x44, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x77, 0x61, 0x67, 0x67, + 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x77, 0x61, 0x67, 0x67, 0x65, + 0x72, 0x12, 0x24, 0x0a, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x10, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x49, 0x6e, 0x66, + 0x6f, 0x52, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x62, + 0x61, 0x73, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x62, 0x61, 0x73, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x73, 0x63, 0x68, 0x65, 0x6d, + 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x73, 0x18, 0x06, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x73, 0x12, 0x1a, + 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x08, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x73, 0x12, 0x27, 0x0a, 0x05, 0x70, 0x61, + 0x74, 0x68, 0x73, 0x18, 0x08, 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x11, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x73, 0x52, 0x05, 0x70, 0x61, + 0x74, 0x68, 0x73, 0x12, 0x39, 0x0a, 0x0b, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x52, 0x0b, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x40, + 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x0a, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, + 0x12, 0x3d, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x73, 0x18, 0x0b, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, + 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x09, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x73, 0x12, + 0x3b, 0x0a, 0x08, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x18, 0x0c, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x1f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x53, + 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x6d, 0x65, + 0x6e, 0x74, 0x52, 0x08, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x52, 0x0a, 0x14, + 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x5f, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, + 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x13, 0x73, 0x65, 0x63, + 0x75, 0x72, 0x69, 0x74, 0x79, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x12, 0x23, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x54, 0x61, 0x67, 0x52, + 0x04, 0x74, 0x61, 0x67, 0x73, 0x12, 0x3d, 0x0a, 0x0d, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x5f, 0x64, 0x6f, 0x63, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6f, + 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, + 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, 0x52, 0x0c, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x44, 0x6f, 0x63, 0x73, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, + 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x10, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, + 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, + 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, + 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x55, 0x0a, 0x08, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, + 0x73, 0x12, 0x49, 0x0a, 0x15, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, + 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 
0x76, 0x32, 0x2e, 0x4e, 0x61, + 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x22, 0x83, 0x01, 0x0a, + 0x0c, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, 0x12, 0x20, 0x0a, + 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, + 0x6c, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, + 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, + 0x6f, 0x6e, 0x22, 0xff, 0x02, 0x0a, 0x0a, 0x46, 0x69, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, + 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, + 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x29, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, + 0x41, 0x6e, 0x79, 0x52, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x1a, 0x0a, 0x08, + 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, + 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, + 0x72, 0x65, 0x61, 0x64, 0x5f, 0x6f, 0x6e, 0x6c, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x08, 0x72, 0x65, 0x61, 0x64, 0x4f, 0x6e, 0x6c, 0x79, 0x12, 0x3d, 0x0a, 0x0d, 0x65, 0x78, 0x74, + 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x64, 0x6f, 0x63, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x18, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x45, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, 0x52, 0x0c, 0x65, 0x78, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x78, 0x61, 0x6d, + 0x70, 0x6c, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x07, 0x65, 0x78, 0x61, 0x6d, + 0x70, 0x6c, 0x65, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, + 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, + 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, + 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, + 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xab, 0x06, 0x0a, 0x1a, 0x46, 0x6f, 0x72, 0x6d, 0x44, 0x61, 0x74, + 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 
0x68, + 0x65, 0x6d, 0x61, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, + 0x0e, 0x0a, 0x02, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x6e, 0x12, + 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2a, 0x0a, 0x11, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x65, + 0x6d, 0x70, 0x74, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x31, 0x0a, + 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6f, + 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, + 0x69, 0x76, 0x65, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, + 0x12, 0x2b, 0x0a, 0x11, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x66, + 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6c, + 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x29, 0x0a, + 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, + 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x69, + 0x6d, 0x75, 0x6d, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, + 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, + 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, + 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x12, + 0x18, 0x0a, 0x07, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x01, + 0x52, 0x07, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, + 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0e, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, + 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x61, 0x78, 0x5f, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x4c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x69, 0x6e, 0x5f, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x18, 0x10, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, + 0x11, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x12, 0x1b, + 0x0a, 0x09, 0x6d, 0x61, 0x78, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x12, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x08, 
0x6d, 0x61, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x6d, + 0x69, 0x6e, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x13, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, + 0x6d, 0x69, 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x75, 0x6e, 0x69, 0x71, + 0x75, 0x65, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, + 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x23, 0x0a, 0x04, 0x65, + 0x6e, 0x75, 0x6d, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x04, 0x65, 0x6e, 0x75, 0x6d, + 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x5f, 0x6f, 0x66, 0x18, + 0x16, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x4f, + 0x66, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x17, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, + 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, + 0x6f, 0x6e, 0x22, 0xab, 0x05, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x12, 0x0a, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x31, 0x0a, 0x05, 0x69, 0x74, 0x65, + 0x6d, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, + 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x2b, 0x0a, 0x11, + 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x29, 0x0a, 0x07, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x07, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x2b, + 0x0a, 0x11, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, 0x6d, 0x61, 0x78, 0x69, + 0x6d, 0x75, 0x6d, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, 0x78, 0x63, 0x6c, 0x75, + 0x73, 0x69, 0x76, 0x65, 0x4d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x18, 0x0a, 0x07, 0x6d, + 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x08, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x6d, 0x69, + 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, + 0x76, 0x65, 0x5f, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x10, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, 0x69, 0x6e, 0x69, 0x6d, + 0x75, 0x6d, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x61, 0x78, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x4c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x69, 
0x6e, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, + 0x0b, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x61, + 0x78, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, + 0x61, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x5f, 0x69, + 0x74, 0x65, 0x6d, 0x73, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x69, 0x6e, 0x49, + 0x74, 0x65, 0x6d, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, 0x5f, 0x69, + 0x74, 0x65, 0x6d, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x75, 0x6e, 0x69, 0x71, + 0x75, 0x65, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x23, 0x0a, 0x04, 0x65, 0x6e, 0x75, 0x6d, 0x18, + 0x10, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x04, 0x65, 0x6e, 0x75, 0x6d, 0x12, 0x1f, 0x0a, 0x0b, + 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x5f, 0x6f, 0x66, 0x18, 0x11, 0x20, 0x01, 0x28, + 0x01, 0x52, 0x0a, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x4f, 0x66, 0x12, 0x20, 0x0a, + 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x12, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x13, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, + 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, + 0x22, 0xfd, 0x05, 0x0a, 0x18, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x1a, 0x0a, + 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x6e, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x6e, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, + 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x31, 0x0a, 0x05, 0x69, + 0x74, 0x65, 0x6d, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, + 0x65, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x2b, + 0x0a, 0x11, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x66, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6c, 0x6c, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 
0x12, 0x29, 0x0a, 0x07, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, + 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x07, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, + 0x6d, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, + 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, 0x6d, 0x61, + 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, 0x78, 0x63, + 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x18, 0x0a, + 0x07, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, + 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, 0x6c, 0x75, + 0x73, 0x69, 0x76, 0x65, 0x5f, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0d, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x10, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, 0x69, 0x6e, + 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x61, 0x78, 0x5f, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x4c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x69, 0x6e, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x10, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x12, 0x1b, 0x0a, 0x09, + 0x6d, 0x61, 0x78, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x08, 0x6d, 0x61, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x69, 0x6e, + 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x69, + 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, + 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x13, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x75, 0x6e, + 0x69, 0x71, 0x75, 0x65, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x23, 0x0a, 0x04, 0x65, 0x6e, 0x75, + 0x6d, 0x18, 0x14, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x04, 0x65, 0x6e, 0x75, 0x6d, 0x12, 0x1f, + 0x0a, 0x0b, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x5f, 0x6f, 0x66, 0x18, 0x15, 0x20, + 0x01, 0x28, 0x01, 0x52, 0x0a, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x4f, 0x66, 0x12, + 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x16, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, + 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, + 0x22, 0x57, 0x0a, 0x07, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x4c, 0x0a, 0x15, 0x61, + 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, + 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x48, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 
0x50, + 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x22, 0xa1, 0x02, 0x0a, 0x04, 0x49, 0x6e, + 0x66, 0x6f, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x28, 0x0a, 0x10, 0x74, 0x65, 0x72, 0x6d, 0x73, 0x5f, 0x6f, 0x66, + 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, + 0x74, 0x65, 0x72, 0x6d, 0x73, 0x4f, 0x66, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x2d, + 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x63, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x13, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x43, 0x6f, 0x6e, + 0x74, 0x61, 0x63, 0x74, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x63, 0x74, 0x12, 0x2d, 0x0a, + 0x07, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, + 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4c, 0x69, 0x63, 0x65, + 0x6e, 0x73, 0x65, 0x52, 0x07, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x3f, 0x0a, 0x10, + 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, + 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x37, 0x0a, + 0x09, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x2a, 0x0a, 0x06, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x06, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x22, 0x44, 0x0a, 0x0d, 0x4a, 0x73, 0x6f, 0x6e, 0x52, 0x65, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x11, 0x0a, 0x04, 0x5f, 0x72, 0x65, 0x66, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x52, 0x65, 0x66, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x70, 0x0a, 0x07, + 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, + 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x3f, 0x0a, + 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, + 0x6e, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, + 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x45, + 0x0a, 0x08, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x25, + 0x0a, 0x05, 0x76, 
0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, + 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4b, 0x0a, 0x0b, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x48, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x22, 0x51, 0x0a, 0x0e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4f, 0x0a, 0x0d, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x50, 0x61, + 0x74, 0x68, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x61, 0x74, 0x68, 0x49, 0x74, 0x65, 0x6d, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4f, 0x0a, 0x0d, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2a, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x59, 0x0a, 0x12, 0x4e, 0x61, 0x6d, 0x65, 0x64, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x2f, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x19, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x22, 0x4b, 0x0a, 0x0b, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x32, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, + 0x6d, 0x0a, 0x1c, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, + 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x39, 0x0a, 0x05, 
0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, + 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x37, + 0x0a, 0x0b, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x55, 0x0a, 0x10, 0x4e, 0x61, 0x6d, 0x65, 0x64, + 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x41, 0x72, 0x72, 0x61, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, + 0x2d, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, + 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x74, 0x72, 0x69, + 0x6e, 0x67, 0x41, 0x72, 0x72, 0x61, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xb5, + 0x03, 0x0a, 0x10, 0x4e, 0x6f, 0x6e, 0x42, 0x6f, 0x64, 0x79, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x12, 0x65, 0x0a, 0x1b, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x5f, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x75, 0x62, 0x5f, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x48, 0x00, + 0x52, 0x18, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x6c, 0x0a, 0x1e, 0x66, 0x6f, + 0x72, 0x6d, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x5f, 0x73, 0x75, 0x62, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, + 0x46, 0x6f, 0x72, 0x6d, 0x44, 0x61, 0x74, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x48, 0x00, 0x52, 0x1a, 0x66, 0x6f, + 0x72, 0x6d, 0x44, 0x61, 0x74, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, + 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x62, 0x0a, 0x1a, 0x71, 0x75, 0x65, 0x72, + 0x79, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x75, 0x62, 0x5f, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x6f, + 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x48, 0x00, 0x52, 0x17, 0x71, 0x75, 0x65, 0x72, 0x79, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x5f, 0x0a, 0x19, + 0x70, 0x61, 0x74, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, + 0x75, 0x62, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x22, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 
0x32, 0x2e, 0x50, 0x61, 0x74, + 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x48, 0x00, 0x52, 0x16, 0x70, 0x61, 0x74, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x42, 0x07, 0x0a, + 0x05, 0x6f, 0x6e, 0x65, 0x6f, 0x66, 0x22, 0xa1, 0x02, 0x0a, 0x18, 0x4f, 0x61, 0x75, 0x74, 0x68, + 0x32, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x53, 0x65, 0x63, 0x75, 0x72, + 0x69, 0x74, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x6c, 0x6f, 0x77, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x30, 0x0a, 0x06, 0x73, + 0x63, 0x6f, 0x70, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x53, + 0x63, 0x6f, 0x70, 0x65, 0x73, 0x52, 0x06, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x12, 0x2b, 0x0a, + 0x11, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x75, + 0x72, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, + 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x72, 0x6c, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x6f, + 0x6b, 0x65, 0x6e, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, + 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x72, 0x6c, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, + 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, + 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, + 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xf5, 0x01, 0x0a, 0x19, 0x4f, + 0x61, 0x75, 0x74, 0x68, 0x32, 0x41, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, + 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x66, 0x6c, 0x6f, 0x77, + 0x12, 0x30, 0x0a, 0x06, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x18, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, 0x61, + 0x75, 0x74, 0x68, 0x32, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x52, 0x06, 0x73, 0x63, 0x6f, 0x70, + 0x65, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x75, 0x72, 0x6c, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x72, 0x6c, 0x12, + 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 
0x6e, + 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, + 0x6f, 0x6e, 0x22, 0x82, 0x02, 0x0a, 0x16, 0x4f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x49, 0x6d, 0x70, + 0x6c, 0x69, 0x63, 0x69, 0x74, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x12, 0x0a, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x30, 0x0a, 0x06, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x32, 0x2e, 0x4f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x52, + 0x06, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x61, 0x75, 0x74, 0x68, 0x6f, + 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x10, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x55, 0x72, 0x6c, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, + 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, + 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, + 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xf2, 0x01, 0x0a, 0x16, 0x4f, 0x61, 0x75, 0x74, + 0x68, 0x32, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, + 0x74, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x30, 0x0a, 0x06, 0x73, 0x63, + 0x6f, 0x70, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x53, 0x63, + 0x6f, 0x70, 0x65, 0x73, 0x52, 0x06, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x12, 0x1b, 0x0a, 0x09, + 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x08, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x72, 0x6c, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x10, 0x76, + 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, + 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, + 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x5c, 0x0a, 0x0c, + 0x4f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x12, 0x4c, 0x0a, 0x15, + 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, + 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 
0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x53, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, + 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x22, 0x9e, 0x04, 0x0a, 0x09, 0x4f, + 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x12, 0x18, 0x0a, 0x07, + 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, + 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3d, 0x0a, 0x0d, 0x65, 0x78, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x64, 0x6f, 0x63, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x18, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x45, 0x78, 0x74, + 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, 0x52, 0x0c, 0x65, 0x78, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x6f, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6f, + 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, + 0x6f, 0x64, 0x75, 0x63, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, + 0x6f, 0x64, 0x75, 0x63, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, + 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, + 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, + 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x49, 0x74, + 0x65, 0x6d, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x33, + 0x0a, 0x09, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x73, 0x52, 0x09, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x73, 0x18, 0x0a, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x73, 0x12, 0x1e, 0x0a, + 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, 0x0b, 0x20, 0x01, 0x28, + 0x08, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x3b, 0x0a, + 0x08, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x1f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x65, 0x63, + 0x75, 0x72, 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, + 0x52, 0x08, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, + 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0d, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, + 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 
0x73, 0x69, 0x6f, 0x6e, 0x22, 0xa6, 0x01, 0x0a, 0x09, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x42, 0x0a, 0x0e, 0x62, 0x6f, 0x64, + 0x79, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x19, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x42, + 0x6f, 0x64, 0x79, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x0d, + 0x62, 0x6f, 0x64, 0x79, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x4c, 0x0a, + 0x12, 0x6e, 0x6f, 0x6e, 0x5f, 0x62, 0x6f, 0x64, 0x79, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x6f, 0x6e, 0x42, 0x6f, 0x64, 0x79, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x10, 0x6e, 0x6f, 0x6e, 0x42, 0x6f, + 0x64, 0x79, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x42, 0x07, 0x0a, 0x05, 0x6f, + 0x6e, 0x65, 0x6f, 0x66, 0x22, 0x67, 0x0a, 0x14, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4f, 0x0a, 0x15, + 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, + 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x22, 0x94, 0x01, + 0x0a, 0x0e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x49, 0x74, 0x65, 0x6d, + 0x12, 0x35, 0x0a, 0x09, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, + 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x48, 0x00, 0x52, 0x09, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x42, 0x0a, 0x0e, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, + 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x19, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4a, 0x73, 0x6f, + 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0d, 0x6a, 0x73, + 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x6f, + 0x6e, 0x65, 0x6f, 0x66, 0x22, 0xcf, 0x03, 0x0a, 0x08, 0x50, 0x61, 0x74, 0x68, 0x49, 0x74, 0x65, + 0x6d, 0x12, 0x11, 0x0a, 0x04, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x52, 0x65, 0x66, 0x12, 0x27, 0x0a, 0x03, 0x67, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, + 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x03, 0x67, 0x65, 0x74, 0x12, 0x27, 0x0a, + 0x03, 0x70, 0x75, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, 0x65, + 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x03, 0x70, 0x75, 0x74, 0x12, 0x29, 0x0a, 0x04, 0x70, 0x6f, 0x73, 0x74, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x32, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 
0x52, 0x04, 0x70, 0x6f, 0x73, + 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, + 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, + 0x12, 0x2f, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, + 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x12, 0x29, 0x0a, 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x15, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, 0x70, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x68, 0x65, 0x61, 0x64, 0x12, 0x2b, 0x0a, 0x05, + 0x70, 0x61, 0x74, 0x63, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x05, 0x70, 0x61, 0x74, 0x63, 0x68, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, + 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, + 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, + 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xfb, 0x05, 0x0a, 0x16, 0x50, 0x61, 0x74, 0x68, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x6e, 0x12, 0x20, 0x0a, + 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, + 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, + 0x31, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, + 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x72, 0x69, 0x6d, + 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x05, 0x69, 0x74, 0x65, + 0x6d, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, + 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 
0x12, + 0x29, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, + 0x79, 0x52, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x61, + 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x6d, 0x61, 0x78, + 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, + 0x65, 0x5f, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x10, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, 0x61, 0x78, 0x69, 0x6d, 0x75, + 0x6d, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0c, 0x20, 0x01, + 0x28, 0x01, 0x52, 0x07, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, + 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, + 0x18, 0x0d, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, + 0x65, 0x4d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x61, 0x78, 0x5f, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x61, + 0x78, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x69, 0x6e, 0x5f, 0x6c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x69, 0x6e, + 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, + 0x6e, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, + 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x61, 0x78, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x11, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x61, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x1b, 0x0a, + 0x09, 0x6d, 0x69, 0x6e, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x08, 0x6d, 0x69, 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x75, 0x6e, + 0x69, 0x71, 0x75, 0x65, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x13, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0b, 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x23, 0x0a, + 0x04, 0x65, 0x6e, 0x75, 0x6d, 0x18, 0x14, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x04, 0x65, 0x6e, + 0x75, 0x6d, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x5f, 0x6f, + 0x66, 0x18, 0x15, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, + 0x65, 0x4f, 0x66, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, + 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x16, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, + 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, + 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, + 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x77, 0x0a, 0x05, 0x50, 0x61, 0x74, 0x68, 0x73, 0x12, 0x3f, 0x0a, + 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, + 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, + 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2d, + 0x0a, 0x04, 0x70, 
0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x6f, + 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x50, + 0x61, 0x74, 0x68, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x22, 0x92, 0x05, + 0x0a, 0x0f, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x49, 0x74, 0x65, 0x6d, + 0x73, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x31, 0x0a, + 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6f, + 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, + 0x69, 0x76, 0x65, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, + 0x12, 0x2b, 0x0a, 0x11, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x66, + 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6c, + 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x29, 0x0a, + 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, + 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x69, + 0x6d, 0x75, 0x6d, 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, + 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, + 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, + 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x12, + 0x18, 0x0a, 0x07, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x08, 0x20, 0x01, 0x28, 0x01, + 0x52, 0x07, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, + 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x09, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, + 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x61, 0x78, 0x5f, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x4c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x69, 0x6e, 0x5f, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, + 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x12, 0x1b, + 0x0a, 0x09, 0x6d, 0x61, 0x78, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x08, 0x6d, 0x61, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x6d, + 0x69, 0x6e, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, + 0x6d, 0x69, 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x75, 0x6e, 0x69, 0x71, + 0x75, 0x65, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, + 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x23, 0x0a, 0x04, 0x65, + 0x6e, 0x75, 0x6d, 0x18, 0x10, 0x20, 0x03, 
0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x04, 0x65, 0x6e, 0x75, 0x6d, + 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x5f, 0x6f, 0x66, 0x18, + 0x11, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x4f, + 0x66, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x12, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, + 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, + 0x6f, 0x6e, 0x22, 0x5a, 0x0a, 0x0a, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, + 0x12, 0x4c, 0x0a, 0x15, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x70, + 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x17, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, + 0x65, 0x64, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x22, 0xa8, + 0x06, 0x0a, 0x17, 0x51, 0x75, 0x65, 0x72, 0x79, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x53, 0x75, 0x62, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, + 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x72, 0x65, + 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x02, 0x69, 0x6e, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2a, 0x0a, 0x11, + 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x45, 0x6d, + 0x70, 0x74, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, + 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, + 0x72, 0x6d, 0x61, 0x74, 0x12, 0x31, 0x0a, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x08, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, + 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x69, 0x74, 0x69, 0x76, 0x65, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x73, + 0x52, 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x63, 0x6f, 0x6c, 0x6c, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x09, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x6f, + 0x72, 0x6d, 0x61, 0x74, 0x12, 0x29, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, + 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, + 0x18, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x18, 
0x0b, 0x20, 0x01, 0x28, 0x01, + 0x52, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, + 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0c, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, + 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, + 0x6d, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, + 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, 0x6d, 0x69, + 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, 0x78, 0x63, + 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x1d, 0x0a, + 0x0a, 0x6d, 0x61, 0x78, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x0f, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x1d, 0x0a, 0x0a, + 0x6d, 0x69, 0x6e, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x10, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x70, + 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, + 0x74, 0x74, 0x65, 0x72, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x61, 0x78, 0x5f, 0x69, 0x74, 0x65, + 0x6d, 0x73, 0x18, 0x12, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x61, 0x78, 0x49, 0x74, 0x65, + 0x6d, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, + 0x13, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x69, 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, + 0x21, 0x0a, 0x0c, 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, + 0x14, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, 0x49, 0x74, 0x65, + 0x6d, 0x73, 0x12, 0x23, 0x0a, 0x04, 0x65, 0x6e, 0x75, 0x6d, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, + 0x79, 0x52, 0x04, 0x65, 0x6e, 0x75, 0x6d, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x75, 0x6c, 0x74, 0x69, + 0x70, 0x6c, 0x65, 0x5f, 0x6f, 0x66, 0x18, 0x16, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x6d, 0x75, + 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x4f, 0x66, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, + 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x17, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, + 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, + 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xfe, 0x01, 0x0a, 0x08, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2e, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x49, 0x74, 0x65, 0x6d, + 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x2d, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x52, 
0x07, + 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x30, 0x0a, 0x08, 0x65, 0x78, 0x61, 0x6d, 0x70, + 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x52, + 0x08, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, + 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, + 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, + 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x65, 0x0a, 0x13, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x12, 0x4e, 0x0a, 0x15, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, + 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x19, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, + 0x6d, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x52, 0x14, 0x61, 0x64, 0x64, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, + 0x73, 0x22, 0x90, 0x01, 0x0a, 0x0d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x32, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x08, 0x72, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x42, 0x0a, 0x0e, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, + 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x19, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4a, 0x73, 0x6f, + 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0d, 0x6a, 0x73, + 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x6f, + 0x6e, 0x65, 0x6f, 0x66, 0x22, 0x91, 0x01, 0x0a, 0x09, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x5f, 0x63, + 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, + 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, + 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, + 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0xaf, 0x09, 0x0a, 0x06, 0x53, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x12, 0x11, 0x0a, 0x04, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x52, 0x65, 0x66, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x14, + 0x0a, 0x05, 0x74, 
0x69, 0x74, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, + 0x69, 0x74, 0x6c, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x29, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, 0x5f, 0x6f, 0x66, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x70, 0x6c, 0x65, + 0x4f, 0x66, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x01, 0x52, 0x07, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, + 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x5f, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, + 0x6d, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, + 0x76, 0x65, 0x4d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x69, 0x6e, + 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x09, 0x20, 0x01, 0x28, 0x01, 0x52, 0x07, 0x6d, 0x69, 0x6e, 0x69, + 0x6d, 0x75, 0x6d, 0x12, 0x2b, 0x0a, 0x11, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, + 0x5f, 0x6d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, + 0x65, 0x78, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x76, 0x65, 0x4d, 0x69, 0x6e, 0x69, 0x6d, 0x75, 0x6d, + 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x61, 0x78, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x0b, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, + 0x1d, 0x0a, 0x0a, 0x6d, 0x69, 0x6e, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x0c, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x18, + 0x0a, 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x61, 0x78, 0x5f, + 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x61, 0x78, + 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x5f, 0x69, 0x74, 0x65, + 0x6d, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x69, 0x6e, 0x49, 0x74, 0x65, + 0x6d, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, 0x5f, 0x69, 0x74, 0x65, + 0x6d, 0x73, 0x18, 0x10, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x75, 0x6e, 0x69, 0x71, 0x75, 0x65, + 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x6d, 0x61, 0x78, 0x5f, 0x70, 0x72, 0x6f, + 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x11, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x6d, + 0x61, 0x78, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, 0x25, 0x0a, 0x0e, + 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x12, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x0d, 0x6d, 0x69, 0x6e, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, + 0x69, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, + 0x13, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, + 0x23, 0x0a, 0x04, 0x65, 0x6e, 0x75, 0x6d, 0x18, 0x14, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, + 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 
0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x04, + 0x65, 0x6e, 0x75, 0x6d, 0x12, 0x59, 0x0a, 0x15, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x61, 0x6c, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x15, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, + 0x2e, 0x41, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, + 0x72, 0x74, 0x69, 0x65, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, + 0x28, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x16, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, + 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x49, + 0x74, 0x65, 0x6d, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x69, 0x74, 0x65, + 0x6d, 0x73, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x52, + 0x05, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x29, 0x0a, 0x06, 0x61, 0x6c, 0x6c, 0x5f, 0x6f, 0x66, + 0x18, 0x18, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x61, 0x6c, 0x6c, 0x4f, + 0x66, 0x12, 0x36, 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, + 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x32, 0x2e, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x52, 0x0a, 0x70, + 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x12, 0x24, 0x0a, 0x0d, 0x64, 0x69, 0x73, + 0x63, 0x72, 0x69, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0d, 0x64, 0x69, 0x73, 0x63, 0x72, 0x69, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x12, + 0x1b, 0x0a, 0x09, 0x72, 0x65, 0x61, 0x64, 0x5f, 0x6f, 0x6e, 0x6c, 0x79, 0x18, 0x1b, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x08, 0x72, 0x65, 0x61, 0x64, 0x4f, 0x6e, 0x6c, 0x79, 0x12, 0x21, 0x0a, 0x03, + 0x78, 0x6d, 0x6c, 0x18, 0x1c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x58, 0x6d, 0x6c, 0x52, 0x03, 0x78, 0x6d, 0x6c, 0x12, + 0x3d, 0x0a, 0x0d, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x64, 0x6f, 0x63, 0x73, + 0x18, 0x1d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x32, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, + 0x52, 0x0c, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, 0x12, 0x29, + 0x0a, 0x07, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x0f, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x41, 0x6e, 0x79, + 0x52, 0x07, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, + 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x1f, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, + 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, + 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x7e, 0x0a, 0x0a, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x2c, 0x0a, 
0x06, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x48, 0x00, 0x52, 0x06, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x39, 0x0a, 0x0b, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x53, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x42, 0x07, 0x0a, 0x05, 0x6f, 0x6e, 0x65, 0x6f, 0x66, 0x22, 0x74, 0x0a, 0x13, 0x53, 0x65, + 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x44, 0x65, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x12, 0x5d, 0x0a, 0x15, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, + 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x28, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, + 0x6d, 0x65, 0x64, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x44, 0x65, 0x66, 0x69, 0x6e, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, + 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, + 0x22, 0xe9, 0x04, 0x0a, 0x17, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x44, 0x65, 0x66, + 0x69, 0x6e, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x6d, 0x0a, 0x1d, + 0x62, 0x61, 0x73, 0x69, 0x63, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x65, 0x6e, 0x74, 0x69, 0x63, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, + 0x2e, 0x42, 0x61, 0x73, 0x69, 0x63, 0x41, 0x75, 0x74, 0x68, 0x65, 0x6e, 0x74, 0x69, 0x63, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x48, 0x00, 0x52, 0x1b, + 0x62, 0x61, 0x73, 0x69, 0x63, 0x41, 0x75, 0x74, 0x68, 0x65, 0x6e, 0x74, 0x69, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x46, 0x0a, 0x10, 0x61, + 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x32, 0x2e, 0x41, 0x70, 0x69, 0x4b, 0x65, 0x79, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, + 0x79, 0x48, 0x00, 0x52, 0x0e, 0x61, 0x70, 0x69, 0x4b, 0x65, 0x79, 0x53, 0x65, 0x63, 0x75, 0x72, + 0x69, 0x74, 0x79, 0x12, 0x5e, 0x0a, 0x18, 0x6f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x5f, 0x69, 0x6d, + 0x70, 0x6c, 0x69, 0x63, 0x69, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x32, 0x2e, 0x4f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x49, 0x6d, 0x70, 0x6c, 0x69, 0x63, 0x69, + 0x74, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x48, 0x00, 0x52, 0x16, 0x6f, 0x61, 0x75, + 0x74, 0x68, 0x32, 0x49, 0x6d, 0x70, 0x6c, 0x69, 0x63, 0x69, 0x74, 0x53, 0x65, 0x63, 0x75, 0x72, + 0x69, 0x74, 0x79, 0x12, 0x5e, 0x0a, 0x18, 0x6f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x5f, 0x70, 0x61, + 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 
0x2e, + 0x76, 0x32, 0x2e, 0x4f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, + 0x64, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x48, 0x00, 0x52, 0x16, 0x6f, 0x61, 0x75, + 0x74, 0x68, 0x32, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x53, 0x65, 0x63, 0x75, 0x72, + 0x69, 0x74, 0x79, 0x12, 0x67, 0x0a, 0x1b, 0x6f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x5f, 0x61, 0x70, + 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, + 0x74, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x41, 0x70, 0x70, 0x6c, + 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x48, + 0x00, 0x52, 0x19, 0x6f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x41, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x65, 0x0a, 0x1b, + 0x6f, 0x61, 0x75, 0x74, 0x68, 0x32, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x63, 0x6f, + 0x64, 0x65, 0x5f, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x24, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4f, + 0x61, 0x75, 0x74, 0x68, 0x32, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x53, + 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x48, 0x00, 0x52, 0x18, 0x6f, 0x61, 0x75, 0x74, 0x68, + 0x32, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x53, 0x65, 0x63, 0x75, 0x72, + 0x69, 0x74, 0x79, 0x42, 0x07, 0x0a, 0x05, 0x6f, 0x6e, 0x65, 0x6f, 0x66, 0x22, 0x68, 0x0a, 0x13, + 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x12, 0x51, 0x0a, 0x15, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, + 0x6c, 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, + 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x41, 0x72, 0x72, 0x61, 0x79, + 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x70, + 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x22, 0x23, 0x0a, 0x0b, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, + 0x41, 0x72, 0x72, 0x61, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xbb, 0x01, 0x0a, 0x03, + 0x54, 0x61, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3d, 0x0a, 0x0d, 0x65, 0x78, 0x74, + 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x64, 0x6f, 0x63, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x18, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x45, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, 0x52, 0x0c, 0x65, 0x78, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x44, 0x6f, 0x63, 0x73, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, + 0x6f, 0x72, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, + 0x4e, 0x61, 0x6d, 
0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, + 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x20, 0x0a, 0x08, 0x54, 0x79, 0x70, + 0x65, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x5c, 0x0a, 0x0f, 0x56, + 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x49, + 0x0a, 0x15, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x5f, 0x70, 0x72, 0x6f, + 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, + 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, + 0x41, 0x6e, 0x79, 0x52, 0x14, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x50, + 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x22, 0xc8, 0x01, 0x0a, 0x03, 0x58, 0x6d, + 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, 0x1c, 0x0a, 0x09, 0x61, + 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, + 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x77, 0x72, 0x61, + 0x70, 0x70, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x77, 0x72, 0x61, 0x70, + 0x70, 0x65, 0x64, 0x12, 0x3f, 0x0a, 0x10, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x5f, 0x65, 0x78, + 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, + 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, + 0x41, 0x6e, 0x79, 0x52, 0x0f, 0x76, 0x65, 0x6e, 0x64, 0x6f, 0x72, 0x45, 0x78, 0x74, 0x65, 0x6e, + 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x3e, 0x0a, 0x0e, 0x6f, 0x72, 0x67, 0x2e, 0x6f, 0x70, 0x65, 0x6e, + 0x61, 0x70, 0x69, 0x5f, 0x76, 0x32, 0x42, 0x0c, 0x4f, 0x70, 0x65, 0x6e, 0x41, 0x50, 0x49, 0x50, + 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x16, 0x2e, 0x2f, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, + 0x69, 0x76, 0x32, 0x3b, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x32, 0xa2, 0x02, + 0x03, 0x4f, 0x41, 0x53, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_openapiv2_OpenAPIv2_proto_rawDescOnce sync.Once + file_openapiv2_OpenAPIv2_proto_rawDescData = file_openapiv2_OpenAPIv2_proto_rawDesc +) + +func file_openapiv2_OpenAPIv2_proto_rawDescGZIP() []byte { + file_openapiv2_OpenAPIv2_proto_rawDescOnce.Do(func() { + file_openapiv2_OpenAPIv2_proto_rawDescData = protoimpl.X.CompressGZIP(file_openapiv2_OpenAPIv2_proto_rawDescData) + }) + return file_openapiv2_OpenAPIv2_proto_rawDescData +} + +var file_openapiv2_OpenAPIv2_proto_msgTypes = make([]protoimpl.MessageInfo, 60) +var file_openapiv2_OpenAPIv2_proto_goTypes = []interface{}{ + (*AdditionalPropertiesItem)(nil), // 0: openapi.v2.AdditionalPropertiesItem + (*Any)(nil), // 1: openapi.v2.Any + (*ApiKeySecurity)(nil), // 2: openapi.v2.ApiKeySecurity + (*BasicAuthenticationSecurity)(nil), // 3: openapi.v2.BasicAuthenticationSecurity + (*BodyParameter)(nil), // 4: openapi.v2.BodyParameter + (*Contact)(nil), // 5: openapi.v2.Contact + 
(*Default)(nil), // 6: openapi.v2.Default + (*Definitions)(nil), // 7: openapi.v2.Definitions + (*Document)(nil), // 8: openapi.v2.Document + (*Examples)(nil), // 9: openapi.v2.Examples + (*ExternalDocs)(nil), // 10: openapi.v2.ExternalDocs + (*FileSchema)(nil), // 11: openapi.v2.FileSchema + (*FormDataParameterSubSchema)(nil), // 12: openapi.v2.FormDataParameterSubSchema + (*Header)(nil), // 13: openapi.v2.Header + (*HeaderParameterSubSchema)(nil), // 14: openapi.v2.HeaderParameterSubSchema + (*Headers)(nil), // 15: openapi.v2.Headers + (*Info)(nil), // 16: openapi.v2.Info + (*ItemsItem)(nil), // 17: openapi.v2.ItemsItem + (*JsonReference)(nil), // 18: openapi.v2.JsonReference + (*License)(nil), // 19: openapi.v2.License + (*NamedAny)(nil), // 20: openapi.v2.NamedAny + (*NamedHeader)(nil), // 21: openapi.v2.NamedHeader + (*NamedParameter)(nil), // 22: openapi.v2.NamedParameter + (*NamedPathItem)(nil), // 23: openapi.v2.NamedPathItem + (*NamedResponse)(nil), // 24: openapi.v2.NamedResponse + (*NamedResponseValue)(nil), // 25: openapi.v2.NamedResponseValue + (*NamedSchema)(nil), // 26: openapi.v2.NamedSchema + (*NamedSecurityDefinitionsItem)(nil), // 27: openapi.v2.NamedSecurityDefinitionsItem + (*NamedString)(nil), // 28: openapi.v2.NamedString + (*NamedStringArray)(nil), // 29: openapi.v2.NamedStringArray + (*NonBodyParameter)(nil), // 30: openapi.v2.NonBodyParameter + (*Oauth2AccessCodeSecurity)(nil), // 31: openapi.v2.Oauth2AccessCodeSecurity + (*Oauth2ApplicationSecurity)(nil), // 32: openapi.v2.Oauth2ApplicationSecurity + (*Oauth2ImplicitSecurity)(nil), // 33: openapi.v2.Oauth2ImplicitSecurity + (*Oauth2PasswordSecurity)(nil), // 34: openapi.v2.Oauth2PasswordSecurity + (*Oauth2Scopes)(nil), // 35: openapi.v2.Oauth2Scopes + (*Operation)(nil), // 36: openapi.v2.Operation + (*Parameter)(nil), // 37: openapi.v2.Parameter + (*ParameterDefinitions)(nil), // 38: openapi.v2.ParameterDefinitions + (*ParametersItem)(nil), // 39: openapi.v2.ParametersItem + (*PathItem)(nil), // 40: openapi.v2.PathItem + (*PathParameterSubSchema)(nil), // 41: openapi.v2.PathParameterSubSchema + (*Paths)(nil), // 42: openapi.v2.Paths + (*PrimitivesItems)(nil), // 43: openapi.v2.PrimitivesItems + (*Properties)(nil), // 44: openapi.v2.Properties + (*QueryParameterSubSchema)(nil), // 45: openapi.v2.QueryParameterSubSchema + (*Response)(nil), // 46: openapi.v2.Response + (*ResponseDefinitions)(nil), // 47: openapi.v2.ResponseDefinitions + (*ResponseValue)(nil), // 48: openapi.v2.ResponseValue + (*Responses)(nil), // 49: openapi.v2.Responses + (*Schema)(nil), // 50: openapi.v2.Schema + (*SchemaItem)(nil), // 51: openapi.v2.SchemaItem + (*SecurityDefinitions)(nil), // 52: openapi.v2.SecurityDefinitions + (*SecurityDefinitionsItem)(nil), // 53: openapi.v2.SecurityDefinitionsItem + (*SecurityRequirement)(nil), // 54: openapi.v2.SecurityRequirement + (*StringArray)(nil), // 55: openapi.v2.StringArray + (*Tag)(nil), // 56: openapi.v2.Tag + (*TypeItem)(nil), // 57: openapi.v2.TypeItem + (*VendorExtension)(nil), // 58: openapi.v2.VendorExtension + (*Xml)(nil), // 59: openapi.v2.Xml + (*anypb.Any)(nil), // 60: google.protobuf.Any +} +var file_openapiv2_OpenAPIv2_proto_depIdxs = []int32{ + 50, // 0: openapi.v2.AdditionalPropertiesItem.schema:type_name -> openapi.v2.Schema + 60, // 1: openapi.v2.Any.value:type_name -> google.protobuf.Any + 20, // 2: openapi.v2.ApiKeySecurity.vendor_extension:type_name -> openapi.v2.NamedAny + 20, // 3: openapi.v2.BasicAuthenticationSecurity.vendor_extension:type_name -> openapi.v2.NamedAny + 50, 
// 4: openapi.v2.BodyParameter.schema:type_name -> openapi.v2.Schema + 20, // 5: openapi.v2.BodyParameter.vendor_extension:type_name -> openapi.v2.NamedAny + 20, // 6: openapi.v2.Contact.vendor_extension:type_name -> openapi.v2.NamedAny + 20, // 7: openapi.v2.Default.additional_properties:type_name -> openapi.v2.NamedAny + 26, // 8: openapi.v2.Definitions.additional_properties:type_name -> openapi.v2.NamedSchema + 16, // 9: openapi.v2.Document.info:type_name -> openapi.v2.Info + 42, // 10: openapi.v2.Document.paths:type_name -> openapi.v2.Paths + 7, // 11: openapi.v2.Document.definitions:type_name -> openapi.v2.Definitions + 38, // 12: openapi.v2.Document.parameters:type_name -> openapi.v2.ParameterDefinitions + 47, // 13: openapi.v2.Document.responses:type_name -> openapi.v2.ResponseDefinitions + 54, // 14: openapi.v2.Document.security:type_name -> openapi.v2.SecurityRequirement + 52, // 15: openapi.v2.Document.security_definitions:type_name -> openapi.v2.SecurityDefinitions + 56, // 16: openapi.v2.Document.tags:type_name -> openapi.v2.Tag + 10, // 17: openapi.v2.Document.external_docs:type_name -> openapi.v2.ExternalDocs + 20, // 18: openapi.v2.Document.vendor_extension:type_name -> openapi.v2.NamedAny + 20, // 19: openapi.v2.Examples.additional_properties:type_name -> openapi.v2.NamedAny + 20, // 20: openapi.v2.ExternalDocs.vendor_extension:type_name -> openapi.v2.NamedAny + 1, // 21: openapi.v2.FileSchema.default:type_name -> openapi.v2.Any + 10, // 22: openapi.v2.FileSchema.external_docs:type_name -> openapi.v2.ExternalDocs + 1, // 23: openapi.v2.FileSchema.example:type_name -> openapi.v2.Any + 20, // 24: openapi.v2.FileSchema.vendor_extension:type_name -> openapi.v2.NamedAny + 43, // 25: openapi.v2.FormDataParameterSubSchema.items:type_name -> openapi.v2.PrimitivesItems + 1, // 26: openapi.v2.FormDataParameterSubSchema.default:type_name -> openapi.v2.Any + 1, // 27: openapi.v2.FormDataParameterSubSchema.enum:type_name -> openapi.v2.Any + 20, // 28: openapi.v2.FormDataParameterSubSchema.vendor_extension:type_name -> openapi.v2.NamedAny + 43, // 29: openapi.v2.Header.items:type_name -> openapi.v2.PrimitivesItems + 1, // 30: openapi.v2.Header.default:type_name -> openapi.v2.Any + 1, // 31: openapi.v2.Header.enum:type_name -> openapi.v2.Any + 20, // 32: openapi.v2.Header.vendor_extension:type_name -> openapi.v2.NamedAny + 43, // 33: openapi.v2.HeaderParameterSubSchema.items:type_name -> openapi.v2.PrimitivesItems + 1, // 34: openapi.v2.HeaderParameterSubSchema.default:type_name -> openapi.v2.Any + 1, // 35: openapi.v2.HeaderParameterSubSchema.enum:type_name -> openapi.v2.Any + 20, // 36: openapi.v2.HeaderParameterSubSchema.vendor_extension:type_name -> openapi.v2.NamedAny + 21, // 37: openapi.v2.Headers.additional_properties:type_name -> openapi.v2.NamedHeader + 5, // 38: openapi.v2.Info.contact:type_name -> openapi.v2.Contact + 19, // 39: openapi.v2.Info.license:type_name -> openapi.v2.License + 20, // 40: openapi.v2.Info.vendor_extension:type_name -> openapi.v2.NamedAny + 50, // 41: openapi.v2.ItemsItem.schema:type_name -> openapi.v2.Schema + 20, // 42: openapi.v2.License.vendor_extension:type_name -> openapi.v2.NamedAny + 1, // 43: openapi.v2.NamedAny.value:type_name -> openapi.v2.Any + 13, // 44: openapi.v2.NamedHeader.value:type_name -> openapi.v2.Header + 37, // 45: openapi.v2.NamedParameter.value:type_name -> openapi.v2.Parameter + 40, // 46: openapi.v2.NamedPathItem.value:type_name -> openapi.v2.PathItem + 46, // 47: openapi.v2.NamedResponse.value:type_name -> openapi.v2.Response 
+ 48, // 48: openapi.v2.NamedResponseValue.value:type_name -> openapi.v2.ResponseValue + 50, // 49: openapi.v2.NamedSchema.value:type_name -> openapi.v2.Schema + 53, // 50: openapi.v2.NamedSecurityDefinitionsItem.value:type_name -> openapi.v2.SecurityDefinitionsItem + 55, // 51: openapi.v2.NamedStringArray.value:type_name -> openapi.v2.StringArray + 14, // 52: openapi.v2.NonBodyParameter.header_parameter_sub_schema:type_name -> openapi.v2.HeaderParameterSubSchema + 12, // 53: openapi.v2.NonBodyParameter.form_data_parameter_sub_schema:type_name -> openapi.v2.FormDataParameterSubSchema + 45, // 54: openapi.v2.NonBodyParameter.query_parameter_sub_schema:type_name -> openapi.v2.QueryParameterSubSchema + 41, // 55: openapi.v2.NonBodyParameter.path_parameter_sub_schema:type_name -> openapi.v2.PathParameterSubSchema + 35, // 56: openapi.v2.Oauth2AccessCodeSecurity.scopes:type_name -> openapi.v2.Oauth2Scopes + 20, // 57: openapi.v2.Oauth2AccessCodeSecurity.vendor_extension:type_name -> openapi.v2.NamedAny + 35, // 58: openapi.v2.Oauth2ApplicationSecurity.scopes:type_name -> openapi.v2.Oauth2Scopes + 20, // 59: openapi.v2.Oauth2ApplicationSecurity.vendor_extension:type_name -> openapi.v2.NamedAny + 35, // 60: openapi.v2.Oauth2ImplicitSecurity.scopes:type_name -> openapi.v2.Oauth2Scopes + 20, // 61: openapi.v2.Oauth2ImplicitSecurity.vendor_extension:type_name -> openapi.v2.NamedAny + 35, // 62: openapi.v2.Oauth2PasswordSecurity.scopes:type_name -> openapi.v2.Oauth2Scopes + 20, // 63: openapi.v2.Oauth2PasswordSecurity.vendor_extension:type_name -> openapi.v2.NamedAny + 28, // 64: openapi.v2.Oauth2Scopes.additional_properties:type_name -> openapi.v2.NamedString + 10, // 65: openapi.v2.Operation.external_docs:type_name -> openapi.v2.ExternalDocs + 39, // 66: openapi.v2.Operation.parameters:type_name -> openapi.v2.ParametersItem + 49, // 67: openapi.v2.Operation.responses:type_name -> openapi.v2.Responses + 54, // 68: openapi.v2.Operation.security:type_name -> openapi.v2.SecurityRequirement + 20, // 69: openapi.v2.Operation.vendor_extension:type_name -> openapi.v2.NamedAny + 4, // 70: openapi.v2.Parameter.body_parameter:type_name -> openapi.v2.BodyParameter + 30, // 71: openapi.v2.Parameter.non_body_parameter:type_name -> openapi.v2.NonBodyParameter + 22, // 72: openapi.v2.ParameterDefinitions.additional_properties:type_name -> openapi.v2.NamedParameter + 37, // 73: openapi.v2.ParametersItem.parameter:type_name -> openapi.v2.Parameter + 18, // 74: openapi.v2.ParametersItem.json_reference:type_name -> openapi.v2.JsonReference + 36, // 75: openapi.v2.PathItem.get:type_name -> openapi.v2.Operation + 36, // 76: openapi.v2.PathItem.put:type_name -> openapi.v2.Operation + 36, // 77: openapi.v2.PathItem.post:type_name -> openapi.v2.Operation + 36, // 78: openapi.v2.PathItem.delete:type_name -> openapi.v2.Operation + 36, // 79: openapi.v2.PathItem.options:type_name -> openapi.v2.Operation + 36, // 80: openapi.v2.PathItem.head:type_name -> openapi.v2.Operation + 36, // 81: openapi.v2.PathItem.patch:type_name -> openapi.v2.Operation + 39, // 82: openapi.v2.PathItem.parameters:type_name -> openapi.v2.ParametersItem + 20, // 83: openapi.v2.PathItem.vendor_extension:type_name -> openapi.v2.NamedAny + 43, // 84: openapi.v2.PathParameterSubSchema.items:type_name -> openapi.v2.PrimitivesItems + 1, // 85: openapi.v2.PathParameterSubSchema.default:type_name -> openapi.v2.Any + 1, // 86: openapi.v2.PathParameterSubSchema.enum:type_name -> openapi.v2.Any + 20, // 87: 
openapi.v2.PathParameterSubSchema.vendor_extension:type_name -> openapi.v2.NamedAny + 20, // 88: openapi.v2.Paths.vendor_extension:type_name -> openapi.v2.NamedAny + 23, // 89: openapi.v2.Paths.path:type_name -> openapi.v2.NamedPathItem + 43, // 90: openapi.v2.PrimitivesItems.items:type_name -> openapi.v2.PrimitivesItems + 1, // 91: openapi.v2.PrimitivesItems.default:type_name -> openapi.v2.Any + 1, // 92: openapi.v2.PrimitivesItems.enum:type_name -> openapi.v2.Any + 20, // 93: openapi.v2.PrimitivesItems.vendor_extension:type_name -> openapi.v2.NamedAny + 26, // 94: openapi.v2.Properties.additional_properties:type_name -> openapi.v2.NamedSchema + 43, // 95: openapi.v2.QueryParameterSubSchema.items:type_name -> openapi.v2.PrimitivesItems + 1, // 96: openapi.v2.QueryParameterSubSchema.default:type_name -> openapi.v2.Any + 1, // 97: openapi.v2.QueryParameterSubSchema.enum:type_name -> openapi.v2.Any + 20, // 98: openapi.v2.QueryParameterSubSchema.vendor_extension:type_name -> openapi.v2.NamedAny + 51, // 99: openapi.v2.Response.schema:type_name -> openapi.v2.SchemaItem + 15, // 100: openapi.v2.Response.headers:type_name -> openapi.v2.Headers + 9, // 101: openapi.v2.Response.examples:type_name -> openapi.v2.Examples + 20, // 102: openapi.v2.Response.vendor_extension:type_name -> openapi.v2.NamedAny + 24, // 103: openapi.v2.ResponseDefinitions.additional_properties:type_name -> openapi.v2.NamedResponse + 46, // 104: openapi.v2.ResponseValue.response:type_name -> openapi.v2.Response + 18, // 105: openapi.v2.ResponseValue.json_reference:type_name -> openapi.v2.JsonReference + 25, // 106: openapi.v2.Responses.response_code:type_name -> openapi.v2.NamedResponseValue + 20, // 107: openapi.v2.Responses.vendor_extension:type_name -> openapi.v2.NamedAny + 1, // 108: openapi.v2.Schema.default:type_name -> openapi.v2.Any + 1, // 109: openapi.v2.Schema.enum:type_name -> openapi.v2.Any + 0, // 110: openapi.v2.Schema.additional_properties:type_name -> openapi.v2.AdditionalPropertiesItem + 57, // 111: openapi.v2.Schema.type:type_name -> openapi.v2.TypeItem + 17, // 112: openapi.v2.Schema.items:type_name -> openapi.v2.ItemsItem + 50, // 113: openapi.v2.Schema.all_of:type_name -> openapi.v2.Schema + 44, // 114: openapi.v2.Schema.properties:type_name -> openapi.v2.Properties + 59, // 115: openapi.v2.Schema.xml:type_name -> openapi.v2.Xml + 10, // 116: openapi.v2.Schema.external_docs:type_name -> openapi.v2.ExternalDocs + 1, // 117: openapi.v2.Schema.example:type_name -> openapi.v2.Any + 20, // 118: openapi.v2.Schema.vendor_extension:type_name -> openapi.v2.NamedAny + 50, // 119: openapi.v2.SchemaItem.schema:type_name -> openapi.v2.Schema + 11, // 120: openapi.v2.SchemaItem.file_schema:type_name -> openapi.v2.FileSchema + 27, // 121: openapi.v2.SecurityDefinitions.additional_properties:type_name -> openapi.v2.NamedSecurityDefinitionsItem + 3, // 122: openapi.v2.SecurityDefinitionsItem.basic_authentication_security:type_name -> openapi.v2.BasicAuthenticationSecurity + 2, // 123: openapi.v2.SecurityDefinitionsItem.api_key_security:type_name -> openapi.v2.ApiKeySecurity + 33, // 124: openapi.v2.SecurityDefinitionsItem.oauth2_implicit_security:type_name -> openapi.v2.Oauth2ImplicitSecurity + 34, // 125: openapi.v2.SecurityDefinitionsItem.oauth2_password_security:type_name -> openapi.v2.Oauth2PasswordSecurity + 32, // 126: openapi.v2.SecurityDefinitionsItem.oauth2_application_security:type_name -> openapi.v2.Oauth2ApplicationSecurity + 31, // 127: 
openapi.v2.SecurityDefinitionsItem.oauth2_access_code_security:type_name -> openapi.v2.Oauth2AccessCodeSecurity + 29, // 128: openapi.v2.SecurityRequirement.additional_properties:type_name -> openapi.v2.NamedStringArray + 10, // 129: openapi.v2.Tag.external_docs:type_name -> openapi.v2.ExternalDocs + 20, // 130: openapi.v2.Tag.vendor_extension:type_name -> openapi.v2.NamedAny + 20, // 131: openapi.v2.VendorExtension.additional_properties:type_name -> openapi.v2.NamedAny + 20, // 132: openapi.v2.Xml.vendor_extension:type_name -> openapi.v2.NamedAny + 133, // [133:133] is the sub-list for method output_type + 133, // [133:133] is the sub-list for method input_type + 133, // [133:133] is the sub-list for extension type_name + 133, // [133:133] is the sub-list for extension extendee + 0, // [0:133] is the sub-list for field type_name +} + +func init() { file_openapiv2_OpenAPIv2_proto_init() } +func file_openapiv2_OpenAPIv2_proto_init() { + if File_openapiv2_OpenAPIv2_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_openapiv2_OpenAPIv2_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AdditionalPropertiesItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Any); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ApiKeySecurity); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BasicAuthenticationSecurity); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BodyParameter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Contact); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Default); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Definitions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Document); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Examples); i { + case 0: + return 
&v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExternalDocs); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileSchema); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FormDataParameterSubSchema); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Header); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*HeaderParameterSubSchema); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Headers); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Info); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ItemsItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JsonReference); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*License); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedAny); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedHeader); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedParameter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_openapiv2_OpenAPIv2_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedPathItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedResponseValue); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedSchema); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedSecurityDefinitionsItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedString); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NamedStringArray); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NonBodyParameter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Oauth2AccessCodeSecurity); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Oauth2ApplicationSecurity); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Oauth2ImplicitSecurity); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Oauth2PasswordSecurity); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Oauth2Scopes); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[36].Exporter = 
func(v interface{}, i int) interface{} { + switch v := v.(*Operation); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Parameter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ParameterDefinitions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ParametersItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PathItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PathParameterSubSchema); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Paths); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PrimitivesItems); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[44].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Properties); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[45].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryParameterSubSchema); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[46].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Response); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[47].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ResponseDefinitions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[48].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ResponseValue); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[49].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Responses); i { + case 0: + return &v.state + case 1: + 
return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[50].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Schema); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[51].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SchemaItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[52].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SecurityDefinitions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[53].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SecurityDefinitionsItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[54].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SecurityRequirement); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[55].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*StringArray); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[56].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Tag); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[57].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TypeItem); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[58].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VendorExtension); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[59].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Xml); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_openapiv2_OpenAPIv2_proto_msgTypes[0].OneofWrappers = []interface{}{ + (*AdditionalPropertiesItem_Schema)(nil), + (*AdditionalPropertiesItem_Boolean)(nil), + } + file_openapiv2_OpenAPIv2_proto_msgTypes[30].OneofWrappers = []interface{}{ + (*NonBodyParameter_HeaderParameterSubSchema)(nil), + (*NonBodyParameter_FormDataParameterSubSchema)(nil), + (*NonBodyParameter_QueryParameterSubSchema)(nil), + (*NonBodyParameter_PathParameterSubSchema)(nil), + } + file_openapiv2_OpenAPIv2_proto_msgTypes[37].OneofWrappers = []interface{}{ + (*Parameter_BodyParameter)(nil), + (*Parameter_NonBodyParameter)(nil), + } + file_openapiv2_OpenAPIv2_proto_msgTypes[39].OneofWrappers = []interface{}{ + (*ParametersItem_Parameter)(nil), + (*ParametersItem_JsonReference)(nil), + } + file_openapiv2_OpenAPIv2_proto_msgTypes[48].OneofWrappers = 
[]interface{}{ + (*ResponseValue_Response)(nil), + (*ResponseValue_JsonReference)(nil), + } + file_openapiv2_OpenAPIv2_proto_msgTypes[51].OneofWrappers = []interface{}{ + (*SchemaItem_Schema)(nil), + (*SchemaItem_FileSchema)(nil), + } + file_openapiv2_OpenAPIv2_proto_msgTypes[53].OneofWrappers = []interface{}{ + (*SecurityDefinitionsItem_BasicAuthenticationSecurity)(nil), + (*SecurityDefinitionsItem_ApiKeySecurity)(nil), + (*SecurityDefinitionsItem_Oauth2ImplicitSecurity)(nil), + (*SecurityDefinitionsItem_Oauth2PasswordSecurity)(nil), + (*SecurityDefinitionsItem_Oauth2ApplicationSecurity)(nil), + (*SecurityDefinitionsItem_Oauth2AccessCodeSecurity)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_openapiv2_OpenAPIv2_proto_rawDesc, + NumEnums: 0, + NumMessages: 60, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_openapiv2_OpenAPIv2_proto_goTypes, + DependencyIndexes: file_openapiv2_OpenAPIv2_proto_depIdxs, + MessageInfos: file_openapiv2_OpenAPIv2_proto_msgTypes, + }.Build() + File_openapiv2_OpenAPIv2_proto = out.File + file_openapiv2_OpenAPIv2_proto_rawDesc = nil + file_openapiv2_OpenAPIv2_proto_goTypes = nil + file_openapiv2_OpenAPIv2_proto_depIdxs = nil +} diff --git a/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.proto b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.proto new file mode 100644 index 00000000..1c59b2f4 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/OpenAPIv2.proto @@ -0,0 +1,666 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// THIS FILE IS AUTOMATICALLY GENERATED. + +syntax = "proto3"; + +package openapi.v2; + +import "google/protobuf/any.proto"; + +// This option lets the proto compiler generate Java code inside the package +// name (see below) instead of inside an outer class. It creates a simpler +// developer experience by reducing one-level of name nesting and be +// consistent with most programming languages that don't support outer classes. +option java_multiple_files = true; + +// The Java outer classname should be the filename in UpperCamelCase. This +// class is only used to hold proto descriptor, so developers don't need to +// work with it directly. +option java_outer_classname = "OpenAPIProto"; + +// The Java package name must be proto package name with proper prefix. +option java_package = "org.openapi_v2"; + +// A reasonable prefix for the Objective-C symbols generated from the package. +// It should at a minimum be 3 characters long, all uppercase, and convention +// is to use an abbreviation of the package name. Something short, but +// hopefully unique enough to not conflict with things that may come along in +// the future. 'GPB' is reserved for the protocol buffer implementation itself. +option objc_class_prefix = "OAS"; + +// The Go package name. 
+option go_package = "./openapiv2;openapi_v2"; + +message AdditionalPropertiesItem { + oneof oneof { + Schema schema = 1; + bool boolean = 2; + } +} + +message Any { + google.protobuf.Any value = 1; + string yaml = 2; +} + +message ApiKeySecurity { + string type = 1; + string name = 2; + string in = 3; + string description = 4; + repeated NamedAny vendor_extension = 5; +} + +message BasicAuthenticationSecurity { + string type = 1; + string description = 2; + repeated NamedAny vendor_extension = 3; +} + +message BodyParameter { + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 1; + // The name of the parameter. + string name = 2; + // Determines the location of the parameter. + string in = 3; + // Determines whether or not this parameter is required or optional. + bool required = 4; + Schema schema = 5; + repeated NamedAny vendor_extension = 6; +} + +// Contact information for the owners of the API. +message Contact { + // The identifying name of the contact person/organization. + string name = 1; + // The URL pointing to the contact information. + string url = 2; + // The email address of the contact person/organization. + string email = 3; + repeated NamedAny vendor_extension = 4; +} + +message Default { + repeated NamedAny additional_properties = 1; +} + +// One or more JSON objects describing the schemas being consumed and produced by the API. +message Definitions { + repeated NamedSchema additional_properties = 1; +} + +message Document { + // The Swagger version of this document. + string swagger = 1; + Info info = 2; + // The host (name or ip) of the API. Example: 'swagger.io' + string host = 3; + // The base path to the API. Example: '/api'. + string base_path = 4; + // The transfer protocol of the API. + repeated string schemes = 5; + // A list of MIME types accepted by the API. + repeated string consumes = 6; + // A list of MIME types the API can produce. + repeated string produces = 7; + Paths paths = 8; + Definitions definitions = 9; + ParameterDefinitions parameters = 10; + ResponseDefinitions responses = 11; + repeated SecurityRequirement security = 12; + SecurityDefinitions security_definitions = 13; + repeated Tag tags = 14; + ExternalDocs external_docs = 15; + repeated NamedAny vendor_extension = 16; +} + +message Examples { + repeated NamedAny additional_properties = 1; +} + +// information about external documentation +message ExternalDocs { + string description = 1; + string url = 2; + repeated NamedAny vendor_extension = 3; +} + +// A deterministic version of a JSON Schema object. +message FileSchema { + string format = 1; + string title = 2; + string description = 3; + Any default = 4; + repeated string required = 5; + string type = 6; + bool read_only = 7; + ExternalDocs external_docs = 8; + Any example = 9; + repeated NamedAny vendor_extension = 10; +} + +message FormDataParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + // allows sending a parameter by name only or with an empty value. 
+ bool allow_empty_value = 5; + string type = 6; + string format = 7; + PrimitivesItems items = 8; + string collection_format = 9; + Any default = 10; + double maximum = 11; + bool exclusive_maximum = 12; + double minimum = 13; + bool exclusive_minimum = 14; + int64 max_length = 15; + int64 min_length = 16; + string pattern = 17; + int64 max_items = 18; + int64 min_items = 19; + bool unique_items = 20; + repeated Any enum = 21; + double multiple_of = 22; + repeated NamedAny vendor_extension = 23; +} + +message Header { + string type = 1; + string format = 2; + PrimitivesItems items = 3; + string collection_format = 4; + Any default = 5; + double maximum = 6; + bool exclusive_maximum = 7; + double minimum = 8; + bool exclusive_minimum = 9; + int64 max_length = 10; + int64 min_length = 11; + string pattern = 12; + int64 max_items = 13; + int64 min_items = 14; + bool unique_items = 15; + repeated Any enum = 16; + double multiple_of = 17; + string description = 18; + repeated NamedAny vendor_extension = 19; +} + +message HeaderParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + string type = 5; + string format = 6; + PrimitivesItems items = 7; + string collection_format = 8; + Any default = 9; + double maximum = 10; + bool exclusive_maximum = 11; + double minimum = 12; + bool exclusive_minimum = 13; + int64 max_length = 14; + int64 min_length = 15; + string pattern = 16; + int64 max_items = 17; + int64 min_items = 18; + bool unique_items = 19; + repeated Any enum = 20; + double multiple_of = 21; + repeated NamedAny vendor_extension = 22; +} + +message Headers { + repeated NamedHeader additional_properties = 1; +} + +// General information about the API. +message Info { + // A unique and precise title of the API. + string title = 1; + // A semantic version number of the API. + string version = 2; + // A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed. + string description = 3; + // The terms of service for the API. + string terms_of_service = 4; + Contact contact = 5; + License license = 6; + repeated NamedAny vendor_extension = 7; +} + +message ItemsItem { + repeated Schema schema = 1; +} + +message JsonReference { + string _ref = 1; + string description = 2; +} + +message License { + // The name of the license type. It's encouraged to use an OSI compatible license. + string name = 1; + // The URL pointing to the license. + string url = 2; + repeated NamedAny vendor_extension = 3; +} + +// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs. +message NamedAny { + // Map key + string name = 1; + // Mapped value + Any value = 2; +} + +// Automatically-generated message used to represent maps of Header as ordered (name,value) pairs. +message NamedHeader { + // Map key + string name = 1; + // Mapped value + Header value = 2; +} + +// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs. +message NamedParameter { + // Map key + string name = 1; + // Mapped value + Parameter value = 2; +} + +// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs. 
+message NamedPathItem { + // Map key + string name = 1; + // Mapped value + PathItem value = 2; +} + +// Automatically-generated message used to represent maps of Response as ordered (name,value) pairs. +message NamedResponse { + // Map key + string name = 1; + // Mapped value + Response value = 2; +} + +// Automatically-generated message used to represent maps of ResponseValue as ordered (name,value) pairs. +message NamedResponseValue { + // Map key + string name = 1; + // Mapped value + ResponseValue value = 2; +} + +// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs. +message NamedSchema { + // Map key + string name = 1; + // Mapped value + Schema value = 2; +} + +// Automatically-generated message used to represent maps of SecurityDefinitionsItem as ordered (name,value) pairs. +message NamedSecurityDefinitionsItem { + // Map key + string name = 1; + // Mapped value + SecurityDefinitionsItem value = 2; +} + +// Automatically-generated message used to represent maps of string as ordered (name,value) pairs. +message NamedString { + // Map key + string name = 1; + // Mapped value + string value = 2; +} + +// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs. +message NamedStringArray { + // Map key + string name = 1; + // Mapped value + StringArray value = 2; +} + +message NonBodyParameter { + oneof oneof { + HeaderParameterSubSchema header_parameter_sub_schema = 1; + FormDataParameterSubSchema form_data_parameter_sub_schema = 2; + QueryParameterSubSchema query_parameter_sub_schema = 3; + PathParameterSubSchema path_parameter_sub_schema = 4; + } +} + +message Oauth2AccessCodeSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string authorization_url = 4; + string token_url = 5; + string description = 6; + repeated NamedAny vendor_extension = 7; +} + +message Oauth2ApplicationSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string token_url = 4; + string description = 5; + repeated NamedAny vendor_extension = 6; +} + +message Oauth2ImplicitSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string authorization_url = 4; + string description = 5; + repeated NamedAny vendor_extension = 6; +} + +message Oauth2PasswordSecurity { + string type = 1; + string flow = 2; + Oauth2Scopes scopes = 3; + string token_url = 4; + string description = 5; + repeated NamedAny vendor_extension = 6; +} + +message Oauth2Scopes { + repeated NamedString additional_properties = 1; +} + +message Operation { + repeated string tags = 1; + // A brief summary of the operation. + string summary = 2; + // A longer description of the operation, GitHub Flavored Markdown is allowed. + string description = 3; + ExternalDocs external_docs = 4; + // A unique identifier of the operation. + string operation_id = 5; + // A list of MIME types the API can produce. + repeated string produces = 6; + // A list of MIME types the API can consume. + repeated string consumes = 7; + // The parameters needed to send a valid API call. + repeated ParametersItem parameters = 8; + Responses responses = 9; + // The transfer protocol of the API. 
+ repeated string schemes = 10; + bool deprecated = 11; + repeated SecurityRequirement security = 12; + repeated NamedAny vendor_extension = 13; +} + +message Parameter { + oneof oneof { + BodyParameter body_parameter = 1; + NonBodyParameter non_body_parameter = 2; + } +} + +// One or more JSON representations for parameters +message ParameterDefinitions { + repeated NamedParameter additional_properties = 1; +} + +message ParametersItem { + oneof oneof { + Parameter parameter = 1; + JsonReference json_reference = 2; + } +} + +message PathItem { + string _ref = 1; + Operation get = 2; + Operation put = 3; + Operation post = 4; + Operation delete = 5; + Operation options = 6; + Operation head = 7; + Operation patch = 8; + // The parameters needed to send a valid API call. + repeated ParametersItem parameters = 9; + repeated NamedAny vendor_extension = 10; +} + +message PathParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + string type = 5; + string format = 6; + PrimitivesItems items = 7; + string collection_format = 8; + Any default = 9; + double maximum = 10; + bool exclusive_maximum = 11; + double minimum = 12; + bool exclusive_minimum = 13; + int64 max_length = 14; + int64 min_length = 15; + string pattern = 16; + int64 max_items = 17; + int64 min_items = 18; + bool unique_items = 19; + repeated Any enum = 20; + double multiple_of = 21; + repeated NamedAny vendor_extension = 22; +} + +// Relative paths to the individual endpoints. They must be relative to the 'basePath'. +message Paths { + repeated NamedAny vendor_extension = 1; + repeated NamedPathItem path = 2; +} + +message PrimitivesItems { + string type = 1; + string format = 2; + PrimitivesItems items = 3; + string collection_format = 4; + Any default = 5; + double maximum = 6; + bool exclusive_maximum = 7; + double minimum = 8; + bool exclusive_minimum = 9; + int64 max_length = 10; + int64 min_length = 11; + string pattern = 12; + int64 max_items = 13; + int64 min_items = 14; + bool unique_items = 15; + repeated Any enum = 16; + double multiple_of = 17; + repeated NamedAny vendor_extension = 18; +} + +message Properties { + repeated NamedSchema additional_properties = 1; +} + +message QueryParameterSubSchema { + // Determines whether or not this parameter is required or optional. + bool required = 1; + // Determines the location of the parameter. + string in = 2; + // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed. + string description = 3; + // The name of the parameter. + string name = 4; + // allows sending a parameter by name only or with an empty value. 
+ bool allow_empty_value = 5; + string type = 6; + string format = 7; + PrimitivesItems items = 8; + string collection_format = 9; + Any default = 10; + double maximum = 11; + bool exclusive_maximum = 12; + double minimum = 13; + bool exclusive_minimum = 14; + int64 max_length = 15; + int64 min_length = 16; + string pattern = 17; + int64 max_items = 18; + int64 min_items = 19; + bool unique_items = 20; + repeated Any enum = 21; + double multiple_of = 22; + repeated NamedAny vendor_extension = 23; +} + +message Response { + string description = 1; + SchemaItem schema = 2; + Headers headers = 3; + Examples examples = 4; + repeated NamedAny vendor_extension = 5; +} + +// One or more JSON representations for responses +message ResponseDefinitions { + repeated NamedResponse additional_properties = 1; +} + +message ResponseValue { + oneof oneof { + Response response = 1; + JsonReference json_reference = 2; + } +} + +// Response objects names can either be any valid HTTP status code or 'default'. +message Responses { + repeated NamedResponseValue response_code = 1; + repeated NamedAny vendor_extension = 2; +} + +// A deterministic version of a JSON Schema object. +message Schema { + string _ref = 1; + string format = 2; + string title = 3; + string description = 4; + Any default = 5; + double multiple_of = 6; + double maximum = 7; + bool exclusive_maximum = 8; + double minimum = 9; + bool exclusive_minimum = 10; + int64 max_length = 11; + int64 min_length = 12; + string pattern = 13; + int64 max_items = 14; + int64 min_items = 15; + bool unique_items = 16; + int64 max_properties = 17; + int64 min_properties = 18; + repeated string required = 19; + repeated Any enum = 20; + AdditionalPropertiesItem additional_properties = 21; + TypeItem type = 22; + ItemsItem items = 23; + repeated Schema all_of = 24; + Properties properties = 25; + string discriminator = 26; + bool read_only = 27; + Xml xml = 28; + ExternalDocs external_docs = 29; + Any example = 30; + repeated NamedAny vendor_extension = 31; +} + +message SchemaItem { + oneof oneof { + Schema schema = 1; + FileSchema file_schema = 2; + } +} + +message SecurityDefinitions { + repeated NamedSecurityDefinitionsItem additional_properties = 1; +} + +message SecurityDefinitionsItem { + oneof oneof { + BasicAuthenticationSecurity basic_authentication_security = 1; + ApiKeySecurity api_key_security = 2; + Oauth2ImplicitSecurity oauth2_implicit_security = 3; + Oauth2PasswordSecurity oauth2_password_security = 4; + Oauth2ApplicationSecurity oauth2_application_security = 5; + Oauth2AccessCodeSecurity oauth2_access_code_security = 6; + } +} + +message SecurityRequirement { + repeated NamedStringArray additional_properties = 1; +} + +message StringArray { + repeated string value = 1; +} + +message Tag { + string name = 1; + string description = 2; + ExternalDocs external_docs = 3; + repeated NamedAny vendor_extension = 4; +} + +message TypeItem { + repeated string value = 1; +} + +// Any property starting with x- is valid. 
+message VendorExtension { + repeated NamedAny additional_properties = 1; +} + +message Xml { + string name = 1; + string namespace = 2; + string prefix = 3; + bool attribute = 4; + bool wrapped = 5; + repeated NamedAny vendor_extension = 6; +} + diff --git a/vendor/github.com/google/gnostic-models/openapiv2/README.md b/vendor/github.com/google/gnostic-models/openapiv2/README.md new file mode 100644 index 00000000..5276128d --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/README.md @@ -0,0 +1,14 @@ +# OpenAPI v2 Protocol Buffer Models + +This directory contains a Protocol Buffer-language model and related code for +supporting OpenAPI v2. + +Gnostic applications and plugins can use OpenAPIv2.proto to generate Protocol +Buffer support code for their preferred languages. + +OpenAPIv2.go is used by Gnostic to read JSON and YAML OpenAPI descriptions into +the Protocol Buffer-based datastructures generated from OpenAPIv2.proto. + +OpenAPIv2.proto and OpenAPIv2.go are generated by the Gnostic compiler +generator, and OpenAPIv2.pb.go is generated by protoc, the Protocol Buffer +compiler, and protoc-gen-go, the Protocol Buffer Go code generation plugin. diff --git a/vendor/github.com/google/gnostic-models/openapiv2/document.go b/vendor/github.com/google/gnostic-models/openapiv2/document.go new file mode 100644 index 00000000..e96ac0d6 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/document.go @@ -0,0 +1,42 @@ +// Copyright 2020 Google LLC. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package openapi_v2 + +import ( + "gopkg.in/yaml.v3" + + "github.com/google/gnostic-models/compiler" +) + +// ParseDocument reads an OpenAPI v2 description from a YAML/JSON representation. +func ParseDocument(b []byte) (*Document, error) { + info, err := compiler.ReadInfoFromBytes("", b) + if err != nil { + return nil, err + } + root := info.Content[0] + return NewDocument(root, compiler.NewContextWithExtensions("$root", root, nil, nil)) +} + +// YAMLValue produces a serialized YAML representation of the document. 
+func (d *Document) YAMLValue(comment string) ([]byte, error) { + rawInfo := d.ToRawInfo() + rawInfo = &yaml.Node{ + Kind: yaml.DocumentNode, + Content: []*yaml.Node{rawInfo}, + HeadComment: comment, + } + return yaml.Marshal(rawInfo) +} diff --git a/vendor/github.com/google/gnostic-models/openapiv2/openapi-2.0.json b/vendor/github.com/google/gnostic-models/openapiv2/openapi-2.0.json new file mode 100644 index 00000000..afa12b79 --- /dev/null +++ b/vendor/github.com/google/gnostic-models/openapiv2/openapi-2.0.json @@ -0,0 +1,1610 @@ +{ + "title": "A JSON Schema for Swagger 2.0 API.", + "id": "http://swagger.io/v2/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "required": [ + "swagger", + "info", + "paths" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "swagger": { + "type": "string", + "enum": [ + "2.0" + ], + "description": "The Swagger version of this document." + }, + "info": { + "$ref": "#/definitions/info" + }, + "host": { + "type": "string", + "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$", + "description": "The host (name or ip) of the API. Example: 'swagger.io'" + }, + "basePath": { + "type": "string", + "pattern": "^/", + "description": "The base path to the API. Example: '/api'." + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "consumes": { + "description": "A list of MIME types accepted by the API.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "paths": { + "$ref": "#/definitions/paths" + }, + "definitions": { + "$ref": "#/definitions/definitions" + }, + "parameters": { + "$ref": "#/definitions/parameterDefinitions" + }, + "responses": { + "$ref": "#/definitions/responseDefinitions" + }, + "security": { + "$ref": "#/definitions/security" + }, + "securityDefinitions": { + "$ref": "#/definitions/securityDefinitions" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "uniqueItems": true + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "definitions": { + "info": { + "type": "object", + "description": "General information about the API.", + "required": [ + "version", + "title" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "title": { + "type": "string", + "description": "A unique and precise title of the API." + }, + "version": { + "type": "string", + "description": "A semantic version number of the API." + }, + "description": { + "type": "string", + "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed." + }, + "termsOfService": { + "type": "string", + "description": "The terms of service for the API." + }, + "contact": { + "$ref": "#/definitions/contact" + }, + "license": { + "$ref": "#/definitions/license" + } + } + }, + "contact": { + "type": "object", + "description": "Contact information for the owners of the API.", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The identifying name of the contact person/organization." 
+ }, + "url": { + "type": "string", + "description": "The URL pointing to the contact information.", + "format": "uri" + }, + "email": { + "type": "string", + "description": "The email address of the contact person/organization.", + "format": "email" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "license": { + "type": "object", + "required": [ + "name" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The name of the license type. It's encouraged to use an OSI compatible license." + }, + "url": { + "type": "string", + "description": "The URL pointing to the license.", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "paths": { + "type": "object", + "description": "Relative paths to the individual endpoints. They must be relative to the 'basePath'.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + }, + "^/": { + "$ref": "#/definitions/pathItem" + } + }, + "additionalProperties": false + }, + "definitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "description": "One or more JSON objects describing the schemas being consumed and produced by the API." + }, + "parameterDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameter" + }, + "description": "One or more JSON representations for parameters" + }, + "responseDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/response" + }, + "description": "One or more JSON representations for responses" + }, + "externalDocs": { + "type": "object", + "additionalProperties": false, + "description": "information about external documentation", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "examples": { + "type": "object", + "additionalProperties": true + }, + "mimeType": { + "type": "string", + "description": "The MIME type of the HTTP message." + }, + "operation": { + "type": "object", + "required": [ + "responses" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the operation." + }, + "description": { + "type": "string", + "description": "A longer description of the operation, GitHub Flavored Markdown is allowed." + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "operationId": { + "type": "string", + "description": "A unique identifier of the operation." 
+ }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "consumes": { + "description": "A list of MIME types the API can consume.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "parameters": { + "$ref": "#/definitions/parametersList" + }, + "responses": { + "$ref": "#/definitions/responses" + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "security": { + "$ref": "#/definitions/security" + } + } + }, + "pathItem": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "get": { + "$ref": "#/definitions/operation" + }, + "put": { + "$ref": "#/definitions/operation" + }, + "post": { + "$ref": "#/definitions/operation" + }, + "delete": { + "$ref": "#/definitions/operation" + }, + "options": { + "$ref": "#/definitions/operation" + }, + "head": { + "$ref": "#/definitions/operation" + }, + "patch": { + "$ref": "#/definitions/operation" + }, + "parameters": { + "$ref": "#/definitions/parametersList" + } + } + }, + "responses": { + "type": "object", + "description": "Response objects names can either be any valid HTTP status code or 'default'.", + "minProperties": 1, + "additionalProperties": false, + "patternProperties": { + "^([0-9]{3})$|^(default)$": { + "$ref": "#/definitions/responseValue" + }, + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "not": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + } + }, + "responseValue": { + "oneOf": [ + { + "$ref": "#/definitions/response" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "response": { + "type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + }, + "schema": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "$ref": "#/definitions/fileSchema" + } + ] + }, + "headers": { + "$ref": "#/definitions/headers" + }, + "examples": { + "$ref": "#/definitions/examples" + } + }, + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "headers": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/header" + } + }, + "header": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": 
"#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "vendorExtension": { + "description": "Any property starting with x- is valid.", + "additionalProperties": true, + "additionalItems": true + }, + "bodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "schema" + ], + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "body" + ] + }, + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "schema": { + "$ref": "#/definitions/schema" + } + }, + "additionalProperties": false + }, + "headerParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "header" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "queryParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "query" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. 
This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." + }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "formDataParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "formData" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array", + "file" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "pathParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "required" + ], + "properties": { + "required": { + "type": "boolean", + "enum": [ + true + ], + "description": "Determines whether or not this parameter is required or optional." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "path" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "nonBodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "type" + ], + "oneOf": [ + { + "$ref": "#/definitions/headerParameterSubSchema" + }, + { + "$ref": "#/definitions/formDataParameterSubSchema" + }, + { + "$ref": "#/definitions/queryParameterSubSchema" + }, + { + "$ref": "#/definitions/pathParameterSubSchema" + } + ] + }, + "parameter": { + "oneOf": [ + { + "$ref": "#/definitions/bodyParameter" + }, + { + "$ref": "#/definitions/nonBodyParameter" + } + ] + }, + "schema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "maxProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "enum": { + "$ref": 
"http://json-schema.org/draft-04/schema#/properties/enum" + }, + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "boolean" + } + ], + "default": {} + }, + "type": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/type" + }, + "items": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + } + ], + "default": {} + }, + "allOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "default": {} + }, + "discriminator": { + "type": "string" + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "xml": { + "$ref": "#/definitions/xml" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "fileSchema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "type" + ], + "properties": { + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "type": { + "type": "string", + "enum": [ + "file" + ] + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "primitivesItems": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "securityRequirement": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "xml": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string" + }, + 
"prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean", + "default": false + }, + "wrapped": { + "type": "boolean", + "default": false + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "tag": { + "type": "object", + "additionalProperties": false, + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "securityDefinitions": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/basicAuthenticationSecurity" + }, + { + "$ref": "#/definitions/apiKeySecurity" + }, + { + "$ref": "#/definitions/oauth2ImplicitSecurity" + }, + { + "$ref": "#/definitions/oauth2PasswordSecurity" + }, + { + "$ref": "#/definitions/oauth2ApplicationSecurity" + }, + { + "$ref": "#/definitions/oauth2AccessCodeSecurity" + } + ] + } + }, + "basicAuthenticationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "basic" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "apiKeySecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "name", + "in" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "apiKey" + ] + }, + "name": { + "type": "string" + }, + "in": { + "type": "string", + "enum": [ + "header", + "query" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ImplicitSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "implicit" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2PasswordSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "password" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ApplicationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "application" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2AccessCodeSecurity": { + "type": "object", + 
"additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "accessCode" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2Scopes": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "mediaTypeList": { + "type": "array", + "items": { + "$ref": "#/definitions/mimeType" + }, + "uniqueItems": true + }, + "parametersList": { + "type": "array", + "description": "The parameters needed to send a valid API call.", + "additionalItems": false, + "items": { + "oneOf": [ + { + "$ref": "#/definitions/parameter" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "uniqueItems": true + }, + "schemesList": { + "type": "array", + "description": "The transfer protocol of the API.", + "items": { + "type": "string", + "enum": [ + "http", + "https", + "ws", + "wss" + ] + }, + "uniqueItems": true + }, + "collectionFormat": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes" + ], + "default": "csv" + }, + "collectionFormatWithMulti": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes", + "multi" + ], + "default": "csv" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "jsonReference": { + "type": "object", + "required": [ + "$ref" + ], + "additionalProperties": false, + "properties": { + "$ref": { + "type": "string" + }, + "description": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.go b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.go similarity index 99% rename from vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.go rename to 
vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.go index d54a84db..4b1131ce 100644 --- a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.go +++ b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.go @@ -23,7 +23,7 @@ import ( "gopkg.in/yaml.v3" - "github.com/google/gnostic/compiler" + "github.com/google/gnostic-models/compiler" ) // Version returns the package name (and OpenAPI version). @@ -8560,12 +8560,7 @@ func (m *Strings) ToRawInfo() *yaml.Node { if m == nil { return info } - if m.AdditionalProperties != nil { - for _, item := range m.AdditionalProperties { - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Name)) - info.Content = append(info.Content, compiler.NewScalarNodeForString(item.Value)) - } - } + // &{Name:additionalProperties Type:NamedString StringEnumValues:[] MapType:string Repeated:true Pattern: Implicit:true Description:} return info } diff --git a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.pb.go b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.pb.go similarity index 99% rename from vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.pb.go rename to vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.pb.go index 90a56f55..945b8d11 100644 --- a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.pb.go +++ b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.pb.go @@ -16,8 +16,8 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.28.0 -// protoc v3.19.4 +// protoc-gen-go v1.27.1 +// protoc v3.19.3 // source: openapiv3/OpenAPIv3.proto package openapi_v3 @@ -6760,13 +6760,12 @@ var file_openapiv3_OpenAPIv3_proto_rawDesc = []byte{ 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x33, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x64, 0x41, 0x6e, 0x79, 0x52, 0x16, 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x56, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x3e, 0x0a, 0x0e, 0x6f, 0x72, 0x67, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x33, 0x42, 0x0c, 0x4f, 0x70, 0x65, 0x6e, 0x41, 0x50, 0x49, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, - 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2f, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x2f, 0x6f, 0x70, 0x65, 0x6e, - 0x61, 0x70, 0x69, 0x76, 0x33, 0x3b, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x33, - 0xa2, 0x02, 0x03, 0x4f, 0x41, 0x53, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x5a, 0x16, 0x2e, 0x2f, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x33, 0x3b, 0x6f, 0x70, + 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x33, 0xa2, 0x02, 0x03, 0x4f, 0x41, 0x53, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.proto b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.proto similarity index 99% rename from vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.proto rename to vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.proto index 7aede5ed..1be335b8 100644 --- a/vendor/github.com/google/gnostic/openapiv3/OpenAPIv3.proto +++ b/vendor/github.com/google/gnostic-models/openapiv3/OpenAPIv3.proto @@ -42,7 +42,7 @@ option java_package = "org.openapi_v3"; option 
objc_class_prefix = "OAS"; // The Go package name. -option go_package = "github.com/google/gnostic/openapiv3;openapi_v3"; +option go_package = "./openapiv3;openapi_v3"; message AdditionalPropertiesItem { oneof oneof { diff --git a/vendor/github.com/google/gnostic/openapiv3/README.md b/vendor/github.com/google/gnostic-models/openapiv3/README.md similarity index 89% rename from vendor/github.com/google/gnostic/openapiv3/README.md rename to vendor/github.com/google/gnostic-models/openapiv3/README.md index 83603b82..5ee12d92 100644 --- a/vendor/github.com/google/gnostic/openapiv3/README.md +++ b/vendor/github.com/google/gnostic-models/openapiv3/README.md @@ -19,7 +19,3 @@ for OpenAPI. The schema-generator directory contains support code which generates openapi-3.1.json from the OpenAPI 3.1 specification document (Markdown). - -### How to rebuild - -`protoc -I=. -I=third_party --go_out=. --go_opt=paths=source_relative openapiv3/*.proto` \ No newline at end of file diff --git a/vendor/github.com/google/gnostic/openapiv3/document.go b/vendor/github.com/google/gnostic-models/openapiv3/document.go similarity index 96% rename from vendor/github.com/google/gnostic/openapiv3/document.go rename to vendor/github.com/google/gnostic-models/openapiv3/document.go index ef10d1d9..1cee4677 100644 --- a/vendor/github.com/google/gnostic/openapiv3/document.go +++ b/vendor/github.com/google/gnostic-models/openapiv3/document.go @@ -17,7 +17,7 @@ package openapi_v3 import ( "gopkg.in/yaml.v3" - "github.com/google/gnostic/compiler" + "github.com/google/gnostic-models/compiler" ) // ParseDocument reads an OpenAPI v3 description from a YAML/JSON representation. diff --git a/vendor/github.com/google/gnostic/openapiv3/annotations.pb.go b/vendor/github.com/google/gnostic/openapiv3/annotations.pb.go deleted file mode 100644 index ae242f30..00000000 --- a/vendor/github.com/google/gnostic/openapiv3/annotations.pb.go +++ /dev/null @@ -1,183 +0,0 @@ -// Copyright 2022 Google LLC. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.28.0 -// protoc v3.19.4 -// source: openapiv3/annotations.proto - -package openapi_v3 - -import ( - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - descriptorpb "google.golang.org/protobuf/types/descriptorpb" - reflect "reflect" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -var file_openapiv3_annotations_proto_extTypes = []protoimpl.ExtensionInfo{ - { - ExtendedType: (*descriptorpb.FileOptions)(nil), - ExtensionType: (*Document)(nil), - Field: 1143, - Name: "openapi.v3.document", - Tag: "bytes,1143,opt,name=document", - Filename: "openapiv3/annotations.proto", - }, - { - ExtendedType: (*descriptorpb.MethodOptions)(nil), - ExtensionType: (*Operation)(nil), - Field: 1143, - Name: "openapi.v3.operation", - Tag: "bytes,1143,opt,name=operation", - Filename: "openapiv3/annotations.proto", - }, - { - ExtendedType: (*descriptorpb.MessageOptions)(nil), - ExtensionType: (*Schema)(nil), - Field: 1143, - Name: "openapi.v3.schema", - Tag: "bytes,1143,opt,name=schema", - Filename: "openapiv3/annotations.proto", - }, - { - ExtendedType: (*descriptorpb.FieldOptions)(nil), - ExtensionType: (*Schema)(nil), - Field: 1143, - Name: "openapi.v3.property", - Tag: "bytes,1143,opt,name=property", - Filename: "openapiv3/annotations.proto", - }, -} - -// Extension fields to descriptorpb.FileOptions. -var ( - // optional openapi.v3.Document document = 1143; - E_Document = &file_openapiv3_annotations_proto_extTypes[0] -) - -// Extension fields to descriptorpb.MethodOptions. -var ( - // optional openapi.v3.Operation operation = 1143; - E_Operation = &file_openapiv3_annotations_proto_extTypes[1] -) - -// Extension fields to descriptorpb.MessageOptions. -var ( - // optional openapi.v3.Schema schema = 1143; - E_Schema = &file_openapiv3_annotations_proto_extTypes[2] -) - -// Extension fields to descriptorpb.FieldOptions. -var ( - // optional openapi.v3.Schema property = 1143; - E_Property = &file_openapiv3_annotations_proto_extTypes[3] -) - -var File_openapiv3_annotations_proto protoreflect.FileDescriptor - -var file_openapiv3_annotations_proto_rawDesc = []byte{ - 0x0a, 0x1b, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x33, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, - 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x6f, - 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x33, 0x1a, 0x19, 0x6f, 0x70, 0x65, 0x6e, 0x61, - 0x70, 0x69, 0x76, 0x33, 0x2f, 0x4f, 0x70, 0x65, 0x6e, 0x41, 0x50, 0x49, 0x76, 0x33, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x20, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x3a, 0x4f, 0x0a, 0x08, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, - 0x6e, 0x74, 0x12, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x18, 0xf7, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x33, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x08, 0x64, - 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x3a, 0x54, 0x0a, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x4f, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xf7, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6f, 0x70, - 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x33, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x4c, 0x0a, - 0x06, 0x73, 0x63, 0x68, 0x65, 
0x6d, 0x61, 0x12, 0x1f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xf7, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x33, 0x2e, 0x53, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x3a, 0x4e, 0x0a, 0x08, 0x70, - 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x12, 0x1d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x4f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xf7, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, - 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x33, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x42, 0x5a, 0x0a, 0x0e, 0x6f, - 0x72, 0x67, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x33, 0x42, 0x10, 0x41, - 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, - 0x01, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x2f, 0x6f, 0x70, 0x65, - 0x6e, 0x61, 0x70, 0x69, 0x76, 0x33, 0x3b, 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x5f, 0x76, - 0x33, 0xa2, 0x02, 0x03, 0x4f, 0x41, 0x53, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var file_openapiv3_annotations_proto_goTypes = []interface{}{ - (*descriptorpb.FileOptions)(nil), // 0: google.protobuf.FileOptions - (*descriptorpb.MethodOptions)(nil), // 1: google.protobuf.MethodOptions - (*descriptorpb.MessageOptions)(nil), // 2: google.protobuf.MessageOptions - (*descriptorpb.FieldOptions)(nil), // 3: google.protobuf.FieldOptions - (*Document)(nil), // 4: openapi.v3.Document - (*Operation)(nil), // 5: openapi.v3.Operation - (*Schema)(nil), // 6: openapi.v3.Schema -} -var file_openapiv3_annotations_proto_depIdxs = []int32{ - 0, // 0: openapi.v3.document:extendee -> google.protobuf.FileOptions - 1, // 1: openapi.v3.operation:extendee -> google.protobuf.MethodOptions - 2, // 2: openapi.v3.schema:extendee -> google.protobuf.MessageOptions - 3, // 3: openapi.v3.property:extendee -> google.protobuf.FieldOptions - 4, // 4: openapi.v3.document:type_name -> openapi.v3.Document - 5, // 5: openapi.v3.operation:type_name -> openapi.v3.Operation - 6, // 6: openapi.v3.schema:type_name -> openapi.v3.Schema - 6, // 7: openapi.v3.property:type_name -> openapi.v3.Schema - 8, // [8:8] is the sub-list for method output_type - 8, // [8:8] is the sub-list for method input_type - 4, // [4:8] is the sub-list for extension type_name - 0, // [0:4] is the sub-list for extension extendee - 0, // [0:0] is the sub-list for field type_name -} - -func init() { file_openapiv3_annotations_proto_init() } -func file_openapiv3_annotations_proto_init() { - if File_openapiv3_annotations_proto != nil { - return - } - file_openapiv3_OpenAPIv3_proto_init() - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_openapiv3_annotations_proto_rawDesc, - NumEnums: 0, - NumMessages: 0, - NumExtensions: 4, - NumServices: 0, - }, - GoTypes: file_openapiv3_annotations_proto_goTypes, - DependencyIndexes: file_openapiv3_annotations_proto_depIdxs, - ExtensionInfos: file_openapiv3_annotations_proto_extTypes, - }.Build() - 
File_openapiv3_annotations_proto = out.File - file_openapiv3_annotations_proto_rawDesc = nil - file_openapiv3_annotations_proto_goTypes = nil - file_openapiv3_annotations_proto_depIdxs = nil -} diff --git a/vendor/github.com/google/gnostic/openapiv3/annotations.proto b/vendor/github.com/google/gnostic/openapiv3/annotations.proto deleted file mode 100644 index 0bd87810..00000000 --- a/vendor/github.com/google/gnostic/openapiv3/annotations.proto +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2022 Google LLC. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package openapi.v3; - -import "openapiv3/OpenAPIv3.proto"; -import "google/protobuf/descriptor.proto"; - -// This option lets the proto compiler generate Java code inside the package -// name (see below) instead of inside an outer class. It creates a simpler -// developer experience by reducing one-level of name nesting and be -// consistent with most programming languages that don't support outer classes. -option java_multiple_files = true; - -// The Java outer classname should be the filename in UpperCamelCase. This -// class is only used to hold proto descriptor, so developers don't need to -// work with it directly. -option java_outer_classname = "AnnotationsProto"; - -// The Java package name must be proto package name with proper prefix. -option java_package = "org.openapi_v3"; - -// A reasonable prefix for the Objective-C symbols generated from the package. -// It should at a minimum be 3 characters long, all uppercase, and convention -// is to use an abbreviation of the package name. Something short, but -// hopefully unique enough to not conflict with things that may come along in -// the future. 'GPB' is reserved for the protocol buffer implementation itself. -option objc_class_prefix = "OAS"; - -// The Go package name. 
-option go_package = "github.com/google/gnostic/openapiv3;openapi_v3"; - -extend google.protobuf.FileOptions { - Document document = 1143; -} - -extend google.protobuf.MethodOptions { - Operation operation = 1143; -} - -extend google.protobuf.MessageOptions { - Schema schema = 1143; -} - -extend google.protobuf.FieldOptions { - Schema property = 1143; -} \ No newline at end of file diff --git a/vendor/github.com/google/gnostic/openapiv3/openapi-3.0.json b/vendor/github.com/google/gnostic/openapiv3/openapi-3.0.json deleted file mode 100644 index d5caed16..00000000 --- a/vendor/github.com/google/gnostic/openapiv3/openapi-3.0.json +++ /dev/null @@ -1,1251 +0,0 @@ -{ - "title": "A JSON Schema for OpenAPI 3.0.", - "id": "http://openapis.org/v3/schema.json#", - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "description": "This is the root document object of the OpenAPI document.", - "required": [ - "openapi", - "info", - "paths" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "openapi": { - "type": "string" - }, - "info": { - "$ref": "#/definitions/info" - }, - "servers": { - "type": "array", - "items": { - "$ref": "#/definitions/server" - }, - "uniqueItems": true - }, - "paths": { - "$ref": "#/definitions/paths" - }, - "components": { - "$ref": "#/definitions/components" - }, - "security": { - "type": "array", - "items": { - "$ref": "#/definitions/securityRequirement" - }, - "uniqueItems": true - }, - "tags": { - "type": "array", - "items": { - "$ref": "#/definitions/tag" - }, - "uniqueItems": true - }, - "externalDocs": { - "$ref": "#/definitions/externalDocs" - } - }, - "definitions": { - "info": { - "type": "object", - "description": "The object provides metadata about the API. 
The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience.", - "required": [ - "title", - "version" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "title": { - "type": "string" - }, - "description": { - "type": "string" - }, - "termsOfService": { - "type": "string" - }, - "contact": { - "$ref": "#/definitions/contact" - }, - "license": { - "$ref": "#/definitions/license" - }, - "version": { - "type": "string" - } - } - }, - "contact": { - "type": "object", - "description": "Contact information for the exposed API.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "url": { - "type": "string", - "format": "uri" - }, - "email": { - "type": "string", - "format": "email" - } - } - }, - "license": { - "type": "object", - "description": "License information for the exposed API.", - "required": [ - "name" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "url": { - "type": "string" - } - } - }, - "server": { - "type": "object", - "description": "An object representing a Server.", - "required": [ - "url" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "url": { - "type": "string" - }, - "description": { - "type": "string" - }, - "variables": { - "$ref": "#/definitions/serverVariables" - } - } - }, - "serverVariable": { - "type": "object", - "description": "An object representing a Server Variable for server URL template substitution.", - "required": [ - "default" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "enum": { - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - }, - "default": { - "type": "string" - }, - "description": { - "type": "string" - } - } - }, - "components": { - "type": "object", - "description": "Holds a set of reusable objects for different aspects of the OAS. All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "schemas": { - "$ref": "#/definitions/schemasOrReferences" - }, - "responses": { - "$ref": "#/definitions/responsesOrReferences" - }, - "parameters": { - "$ref": "#/definitions/parametersOrReferences" - }, - "examples": { - "$ref": "#/definitions/examplesOrReferences" - }, - "requestBodies": { - "$ref": "#/definitions/requestBodiesOrReferences" - }, - "headers": { - "$ref": "#/definitions/headersOrReferences" - }, - "securitySchemes": { - "$ref": "#/definitions/securitySchemesOrReferences" - }, - "links": { - "$ref": "#/definitions/linksOrReferences" - }, - "callbacks": { - "$ref": "#/definitions/callbacksOrReferences" - } - } - }, - "paths": { - "type": "object", - "description": "Holds the relative paths to the individual endpoints and their operations. 
The path is appended to the URL from the `Server Object` in order to construct the full URL. The Paths MAY be empty, due to ACL constraints.", - "additionalProperties": false, - "patternProperties": { - "^/": { - "$ref": "#/definitions/pathItem" - }, - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - } - }, - "pathItem": { - "type": "object", - "description": "Describes the operations available on a single path. A Path Item MAY be empty, due to ACL constraints. The path itself is still exposed to the documentation viewer but they will not know which operations and parameters are available.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "$ref": { - "type": "string" - }, - "summary": { - "type": "string" - }, - "description": { - "type": "string" - }, - "get": { - "$ref": "#/definitions/operation" - }, - "put": { - "$ref": "#/definitions/operation" - }, - "post": { - "$ref": "#/definitions/operation" - }, - "delete": { - "$ref": "#/definitions/operation" - }, - "options": { - "$ref": "#/definitions/operation" - }, - "head": { - "$ref": "#/definitions/operation" - }, - "patch": { - "$ref": "#/definitions/operation" - }, - "trace": { - "$ref": "#/definitions/operation" - }, - "servers": { - "type": "array", - "items": { - "$ref": "#/definitions/server" - }, - "uniqueItems": true - }, - "parameters": { - "type": "array", - "items": { - "$ref": "#/definitions/parameterOrReference" - }, - "uniqueItems": true - } - } - }, - "operation": { - "type": "object", - "description": "Describes a single API operation on a path.", - "required": [ - "responses" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "tags": { - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - }, - "summary": { - "type": "string" - }, - "description": { - "type": "string" - }, - "externalDocs": { - "$ref": "#/definitions/externalDocs" - }, - "operationId": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": { - "$ref": "#/definitions/parameterOrReference" - }, - "uniqueItems": true - }, - "requestBody": { - "$ref": "#/definitions/requestBodyOrReference" - }, - "responses": { - "$ref": "#/definitions/responses" - }, - "callbacks": { - "$ref": "#/definitions/callbacksOrReferences" - }, - "deprecated": { - "type": "boolean" - }, - "security": { - "type": "array", - "items": { - "$ref": "#/definitions/securityRequirement" - }, - "uniqueItems": true - }, - "servers": { - "type": "array", - "items": { - "$ref": "#/definitions/server" - }, - "uniqueItems": true - } - } - }, - "externalDocs": { - "type": "object", - "description": "Allows referencing an external resource for extended documentation.", - "required": [ - "url" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "description": { - "type": "string" - }, - "url": { - "type": "string" - } - } - }, - "parameter": { - "type": "object", - "description": "Describes a single operation parameter. 
A unique parameter is defined by a combination of a name and location.", - "required": [ - "name", - "in" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "in": { - "type": "string" - }, - "description": { - "type": "string" - }, - "required": { - "type": "boolean" - }, - "deprecated": { - "type": "boolean" - }, - "allowEmptyValue": { - "type": "boolean" - }, - "style": { - "type": "string" - }, - "explode": { - "type": "boolean" - }, - "allowReserved": { - "type": "boolean" - }, - "schema": { - "$ref": "#/definitions/schemaOrReference" - }, - "example": { - "$ref": "#/definitions/any" - }, - "examples": { - "$ref": "#/definitions/examplesOrReferences" - }, - "content": { - "$ref": "#/definitions/mediaTypes" - } - } - }, - "requestBody": { - "type": "object", - "description": "Describes a single request body.", - "required": [ - "content" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "description": { - "type": "string" - }, - "content": { - "$ref": "#/definitions/mediaTypes" - }, - "required": { - "type": "boolean" - } - } - }, - "mediaType": { - "type": "object", - "description": "Each Media Type Object provides schema and examples for the media type identified by its key.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "schema": { - "$ref": "#/definitions/schemaOrReference" - }, - "example": { - "$ref": "#/definitions/any" - }, - "examples": { - "$ref": "#/definitions/examplesOrReferences" - }, - "encoding": { - "$ref": "#/definitions/encodings" - } - } - }, - "encoding": { - "type": "object", - "description": "A single encoding definition applied to a single schema property.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "contentType": { - "type": "string" - }, - "headers": { - "$ref": "#/definitions/headersOrReferences" - }, - "style": { - "type": "string" - }, - "explode": { - "type": "boolean" - }, - "allowReserved": { - "type": "boolean" - } - } - }, - "responses": { - "type": "object", - "description": "A container for the expected responses of an operation. The container maps a HTTP response code to the expected response. The documentation is not necessarily expected to cover all possible HTTP response codes because they may not be known in advance. However, documentation is expected to cover a successful operation response and any known errors. The `default` MAY be used as a default response object for all HTTP codes that are not covered individually by the specification. 
The `Responses Object` MUST contain at least one response code, and it SHOULD be the response for a successful operation call.", - "additionalProperties": false, - "patternProperties": { - "^([0-9X]{3})$": { - "$ref": "#/definitions/responseOrReference" - }, - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "default": { - "$ref": "#/definitions/responseOrReference" - } - } - }, - "response": { - "type": "object", - "description": "Describes a single response from an API Operation, including design-time, static `links` to operations based on the response.", - "required": [ - "description" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "description": { - "type": "string" - }, - "headers": { - "$ref": "#/definitions/headersOrReferences" - }, - "content": { - "$ref": "#/definitions/mediaTypes" - }, - "links": { - "$ref": "#/definitions/linksOrReferences" - } - } - }, - "callback": { - "type": "object", - "description": "A map of possible out-of band callbacks related to the parent operation. Each value in the map is a Path Item Object that describes a set of requests that may be initiated by the API provider and the expected responses. The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation.", - "additionalProperties": false, - "patternProperties": { - "^": { - "$ref": "#/definitions/pathItem" - }, - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - } - }, - "example": { - "type": "object", - "description": "", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "summary": { - "type": "string" - }, - "description": { - "type": "string" - }, - "value": { - "$ref": "#/definitions/any" - }, - "externalValue": { - "type": "string" - } - } - }, - "link": { - "type": "object", - "description": "The `Link object` represents a possible design-time link for a response. The presence of a link does not guarantee the caller's ability to successfully invoke it, rather it provides a known relationship and traversal mechanism between responses and other operations. Unlike _dynamic_ links (i.e. links provided **in** the response payload), the OAS linking mechanism does not require link information in the runtime response. For computing links, and providing instructions to execute them, a runtime expression is used for accessing values in an operation and using them as parameters while invoking the linked operation.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "operationRef": { - "type": "string" - }, - "operationId": { - "type": "string" - }, - "parameters": { - "$ref": "#/definitions/anysOrExpressions" - }, - "requestBody": { - "$ref": "#/definitions/anyOrExpression" - }, - "description": { - "type": "string" - }, - "server": { - "$ref": "#/definitions/server" - } - } - }, - "header": { - "type": "object", - "description": "The Header Object follows the structure of the Parameter Object with the following changes: 1. `name` MUST NOT be specified, it is given in the corresponding `headers` map. 1. `in` MUST NOT be specified, it is implicitly in `header`. 1. 
All traits that are affected by the location MUST be applicable to a location of `header` (for example, `style`).", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "description": { - "type": "string" - }, - "required": { - "type": "boolean" - }, - "deprecated": { - "type": "boolean" - }, - "allowEmptyValue": { - "type": "boolean" - }, - "style": { - "type": "string" - }, - "explode": { - "type": "boolean" - }, - "allowReserved": { - "type": "boolean" - }, - "schema": { - "$ref": "#/definitions/schemaOrReference" - }, - "example": { - "$ref": "#/definitions/any" - }, - "examples": { - "$ref": "#/definitions/examplesOrReferences" - }, - "content": { - "$ref": "#/definitions/mediaTypes" - } - } - }, - "tag": { - "type": "object", - "description": "Adds metadata to a single tag that is used by the Operation Object. It is not mandatory to have a Tag Object per tag defined in the Operation Object instances.", - "required": [ - "name" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "description": { - "type": "string" - }, - "externalDocs": { - "$ref": "#/definitions/externalDocs" - } - } - }, - "reference": { - "type": "object", - "description": "A simple object to allow referencing other components in the specification, internally and externally. The Reference Object is defined by JSON Reference and follows the same structure, behavior and rules. For this specification, reference resolution is accomplished as defined by the JSON Reference specification and not by the JSON Schema specification.", - "required": [ - "$ref" - ], - "additionalProperties": false, - "properties": { - "$ref": { - "type": "string" - }, - "summary": { - "type": "string" - }, - "description": { - "type": "string" - } - } - }, - "schema": { - "type": "object", - "description": "The Schema Object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. This object is an extended subset of the JSON Schema Specification Wright Draft 00. For more information about the properties, see JSON Schema Core and JSON Schema Validation. 
Unless stated otherwise, the property definitions follow the JSON Schema.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "nullable": { - "type": "boolean" - }, - "discriminator": { - "$ref": "#/definitions/discriminator" - }, - "readOnly": { - "type": "boolean" - }, - "writeOnly": { - "type": "boolean" - }, - "xml": { - "$ref": "#/definitions/xml" - }, - "externalDocs": { - "$ref": "#/definitions/externalDocs" - }, - "example": { - "$ref": "#/definitions/any" - }, - "deprecated": { - "type": "boolean" - }, - "title": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/title" - }, - "multipleOf": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" - }, - "maximum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" - }, - "exclusiveMaximum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" - }, - "minimum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" - }, - "exclusiveMinimum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" - }, - "maxLength": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/maxLength" - }, - "minLength": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/minLength" - }, - "pattern": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" - }, - "maxItems": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/maxItems" - }, - "minItems": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/minItems" - }, - "uniqueItems": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" - }, - "maxProperties": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/maxProperties" - }, - "minProperties": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/minProperties" - }, - "required": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/required" - }, - "enum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" - }, - "type": { - "type": "string" - }, - "allOf": { - "type": "array", - "items": { - "$ref": "#/definitions/schemaOrReference" - }, - "minItems": 1 - }, - "oneOf": { - "type": "array", - "items": { - "$ref": "#/definitions/schemaOrReference" - }, - "minItems": 1 - }, - "anyOf": { - "type": "array", - "items": { - "$ref": "#/definitions/schemaOrReference" - }, - "minItems": 1 - }, - "not": { - "$ref": "#/definitions/schema" - }, - "items": { - "anyOf": [ - { - "$ref": "#/definitions/schemaOrReference" - }, - { - "type": "array", - "items": { - "$ref": "#/definitions/schemaOrReference" - }, - "minItems": 1 - } - ] - }, - "properties": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/schemaOrReference" - } - }, - "additionalProperties": { - "oneOf": [ - { - "$ref": "#/definitions/schemaOrReference" - }, - { - "type": "boolean" - } - ] - }, - "default": { - "$ref": "#/definitions/defaultType" - }, - "description": { - "type": "string" - }, - "format": { - "type": "string" - } - } - }, - "discriminator": { - "type": "object", - "description": "When request bodies or response payloads may be one of a number of different schemas, a `discriminator` object can be used to aid in serialization, deserialization, and validation. 
The discriminator is a specific object in a schema which is used to inform the consumer of the specification of an alternative schema based on the value associated with it. When using the discriminator, _inline_ schemas will not be considered.", - "required": [ - "propertyName" - ], - "additionalProperties": false, - "properties": { - "propertyName": { - "type": "string" - }, - "mapping": { - "$ref": "#/definitions/strings" - } - } - }, - "xml": { - "type": "object", - "description": "A metadata object that allows for more fine-tuned XML model definitions. When using arrays, XML element names are *not* inferred (for singular/plural forms) and the `name` property SHOULD be used to add that information. See examples for expected behavior.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "namespace": { - "type": "string" - }, - "prefix": { - "type": "string" - }, - "attribute": { - "type": "boolean" - }, - "wrapped": { - "type": "boolean" - } - } - }, - "securityScheme": { - "type": "object", - "description": "Defines a security scheme that can be used by the operations. Supported schemes are HTTP authentication, an API key (either as a header or as a query parameter), OAuth2's common flows (implicit, password, application and access code) as defined in RFC6749, and OpenID Connect Discovery.", - "required": [ - "type" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "type": { - "type": "string" - }, - "description": { - "type": "string" - }, - "name": { - "type": "string" - }, - "in": { - "type": "string" - }, - "scheme": { - "type": "string" - }, - "bearerFormat": { - "type": "string" - }, - "flows": { - "$ref": "#/definitions/oauthFlows" - }, - "openIdConnectUrl": { - "type": "string" - } - } - }, - "oauthFlows": { - "type": "object", - "description": "Allows configuration of the supported OAuth Flows.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "implicit": { - "$ref": "#/definitions/oauthFlow" - }, - "password": { - "$ref": "#/definitions/oauthFlow" - }, - "clientCredentials": { - "$ref": "#/definitions/oauthFlow" - }, - "authorizationCode": { - "$ref": "#/definitions/oauthFlow" - } - } - }, - "oauthFlow": { - "type": "object", - "description": "Configuration details for a supported OAuth Flow", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "authorizationUrl": { - "type": "string" - }, - "tokenUrl": { - "type": "string" - }, - "refreshUrl": { - "type": "string" - }, - "scopes": { - "$ref": "#/definitions/strings" - } - } - }, - "securityRequirement": { - "type": "object", - "description": "Lists the required security schemes to execute this operation. The name used for each property MUST correspond to a security scheme declared in the Security Schemes under the Components Object. Security Requirement Objects that contain multiple schemes require that all schemes MUST be satisfied for a request to be authorized. This enables support for scenarios where multiple query parameters or HTTP headers are required to convey security information. 
When a list of Security Requirement Objects is defined on the Open API object or Operation Object, only one of Security Requirement Objects in the list needs to be satisfied to authorize the request.", - "additionalProperties": false, - "patternProperties": { - "^[a-zA-Z0-9\\.\\-_]+$": { - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - } - } - }, - "anyOrExpression": { - "oneOf": [ - { - "$ref": "#/definitions/any" - }, - { - "$ref": "#/definitions/expression" - } - ] - }, - "callbackOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/callback" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "exampleOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/example" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "headerOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/header" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "linkOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/link" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "parameterOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/parameter" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "requestBodyOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/requestBody" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "responseOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/response" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "schemaOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/schema" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "securitySchemeOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/securityScheme" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "anysOrExpressions": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/anyOrExpression" - } - }, - "callbacksOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/callbackOrReference" - } - }, - "encodings": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/encoding" - } - }, - "examplesOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/exampleOrReference" - } - }, - "headersOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/headerOrReference" - } - }, - "linksOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/linkOrReference" - } - }, - "mediaTypes": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/mediaType" - } - }, - "parametersOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/parameterOrReference" - } - }, - "requestBodiesOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/requestBodyOrReference" - } - }, - "responsesOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/responseOrReference" - } - }, - "schemasOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/schemaOrReference" - } - }, - "securitySchemesOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/securitySchemeOrReference" - } - }, - "serverVariables": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/serverVariable" - } - }, - "strings": { - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "object": { - "type": "object", - "additionalProperties": 
true - }, - "any": { - "additionalProperties": true - }, - "expression": { - "type": "object", - "additionalProperties": true - }, - "specificationExtension": { - "description": "Any property starting with x- is valid.", - "oneOf": [ - { - "type": "null" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "string" - }, - { - "type": "object" - }, - { - "type": "array" - } - ] - }, - "defaultType": { - "oneOf": [ - { - "type": "null" - }, - { - "type": "array" - }, - { - "type": "object" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "string" - } - ] - } - } -} diff --git a/vendor/github.com/google/gnostic/openapiv3/openapi-3.1.json b/vendor/github.com/google/gnostic/openapiv3/openapi-3.1.json deleted file mode 100644 index ed0b83ad..00000000 --- a/vendor/github.com/google/gnostic/openapiv3/openapi-3.1.json +++ /dev/null @@ -1,1250 +0,0 @@ -{ - "title": "A JSON Schema for OpenAPI 3.0.", - "id": "http://openapis.org/v3/schema.json#", - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "description": "This is the root document object of the OpenAPI document.", - "required": [ - "openapi", - "info", - "paths" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "openapi": { - "type": "string" - }, - "info": { - "$ref": "#/definitions/info" - }, - "servers": { - "type": "array", - "items": { - "$ref": "#/definitions/server" - }, - "uniqueItems": true - }, - "paths": { - "$ref": "#/definitions/paths" - }, - "components": { - "$ref": "#/definitions/components" - }, - "security": { - "type": "array", - "items": { - "$ref": "#/definitions/securityRequirement" - }, - "uniqueItems": true - }, - "tags": { - "type": "array", - "items": { - "$ref": "#/definitions/tag" - }, - "uniqueItems": true - }, - "externalDocs": { - "$ref": "#/definitions/externalDocs" - } - }, - "definitions": { - "info": { - "type": "object", - "description": "The object provides metadata about the API. 
The metadata MAY be used by the clients if needed, and MAY be presented in editing or documentation generation tools for convenience.", - "required": [ - "title", - "version" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "title": { - "type": "string" - }, - "description": { - "type": "string" - }, - "termsOfService": { - "type": "string" - }, - "contact": { - "$ref": "#/definitions/contact" - }, - "license": { - "$ref": "#/definitions/license" - }, - "version": { - "type": "string" - }, - "summary": { - "type": "string" - } - } - }, - "contact": { - "type": "object", - "description": "Contact information for the exposed API.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "url": { - "type": "string", - "format": "uri" - }, - "email": { - "type": "string", - "format": "email" - } - } - }, - "license": { - "type": "object", - "description": "License information for the exposed API.", - "required": [ - "name" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "url": { - "type": "string" - } - } - }, - "server": { - "type": "object", - "description": "An object representing a Server.", - "required": [ - "url" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "url": { - "type": "string" - }, - "description": { - "type": "string" - }, - "variables": { - "$ref": "#/definitions/serverVariables" - } - } - }, - "serverVariable": { - "type": "object", - "description": "An object representing a Server Variable for server URL template substitution.", - "required": [ - "default" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "enum": { - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - }, - "default": { - "type": "string" - }, - "description": { - "type": "string" - } - } - }, - "components": { - "type": "object", - "description": "Holds a set of reusable objects for different aspects of the OAS. All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "schemas": { - "$ref": "#/definitions/schemasOrReferences" - }, - "responses": { - "$ref": "#/definitions/responsesOrReferences" - }, - "parameters": { - "$ref": "#/definitions/parametersOrReferences" - }, - "examples": { - "$ref": "#/definitions/examplesOrReferences" - }, - "requestBodies": { - "$ref": "#/definitions/requestBodiesOrReferences" - }, - "headers": { - "$ref": "#/definitions/headersOrReferences" - }, - "securitySchemes": { - "$ref": "#/definitions/securitySchemesOrReferences" - }, - "links": { - "$ref": "#/definitions/linksOrReferences" - }, - "callbacks": { - "$ref": "#/definitions/callbacksOrReferences" - } - } - }, - "paths": { - "type": "object", - "description": "Holds the relative paths to the individual endpoints and their operations. 
The path is appended to the URL from the `Server Object` in order to construct the full URL. The Paths MAY be empty, due to ACL constraints.", - "additionalProperties": false, - "patternProperties": { - "^/": { - "$ref": "#/definitions/pathItem" - }, - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - } - }, - "pathItem": { - "type": "object", - "description": "Describes the operations available on a single path. A Path Item MAY be empty, due to ACL constraints. The path itself is still exposed to the documentation viewer but they will not know which operations and parameters are available.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "$ref": { - "type": "string" - }, - "summary": { - "type": "string" - }, - "description": { - "type": "string" - }, - "get": { - "$ref": "#/definitions/operation" - }, - "put": { - "$ref": "#/definitions/operation" - }, - "post": { - "$ref": "#/definitions/operation" - }, - "delete": { - "$ref": "#/definitions/operation" - }, - "options": { - "$ref": "#/definitions/operation" - }, - "head": { - "$ref": "#/definitions/operation" - }, - "patch": { - "$ref": "#/definitions/operation" - }, - "trace": { - "$ref": "#/definitions/operation" - }, - "servers": { - "type": "array", - "items": { - "$ref": "#/definitions/server" - }, - "uniqueItems": true - }, - "parameters": { - "type": "array", - "items": { - "$ref": "#/definitions/parameterOrReference" - }, - "uniqueItems": true - } - } - }, - "operation": { - "type": "object", - "description": "Describes a single API operation on a path.", - "required": [ - "responses" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "tags": { - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - }, - "summary": { - "type": "string" - }, - "description": { - "type": "string" - }, - "externalDocs": { - "$ref": "#/definitions/externalDocs" - }, - "operationId": { - "type": "string" - }, - "parameters": { - "type": "array", - "items": { - "$ref": "#/definitions/parameterOrReference" - }, - "uniqueItems": true - }, - "requestBody": { - "$ref": "#/definitions/requestBodyOrReference" - }, - "responses": { - "$ref": "#/definitions/responses" - }, - "callbacks": { - "$ref": "#/definitions/callbacksOrReferences" - }, - "deprecated": { - "type": "boolean" - }, - "security": { - "type": "array", - "items": { - "$ref": "#/definitions/securityRequirement" - }, - "uniqueItems": true - }, - "servers": { - "type": "array", - "items": { - "$ref": "#/definitions/server" - }, - "uniqueItems": true - } - } - }, - "externalDocs": { - "type": "object", - "description": "Allows referencing an external resource for extended documentation.", - "required": [ - "url" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "description": { - "type": "string" - }, - "url": { - "type": "string" - } - } - }, - "parameter": { - "type": "object", - "description": "Describes a single operation parameter. 
A unique parameter is defined by a combination of a name and location.", - "required": [ - "name", - "in" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "in": { - "type": "string" - }, - "description": { - "type": "string" - }, - "required": { - "type": "boolean" - }, - "deprecated": { - "type": "boolean" - }, - "allowEmptyValue": { - "type": "boolean" - }, - "style": { - "type": "string" - }, - "explode": { - "type": "boolean" - }, - "allowReserved": { - "type": "boolean" - }, - "schema": { - "$ref": "#/definitions/schemaOrReference" - }, - "example": { - "$ref": "#/definitions/any" - }, - "examples": { - "$ref": "#/definitions/examplesOrReferences" - }, - "content": { - "$ref": "#/definitions/mediaTypes" - } - } - }, - "requestBody": { - "type": "object", - "description": "Describes a single request body.", - "required": [ - "content" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "description": { - "type": "string" - }, - "content": { - "$ref": "#/definitions/mediaTypes" - }, - "required": { - "type": "boolean" - } - } - }, - "mediaType": { - "type": "object", - "description": "Each Media Type Object provides schema and examples for the media type identified by its key.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "schema": { - "$ref": "#/definitions/schemaOrReference" - }, - "example": { - "$ref": "#/definitions/any" - }, - "examples": { - "$ref": "#/definitions/examplesOrReferences" - }, - "encoding": { - "$ref": "#/definitions/encodings" - } - } - }, - "encoding": { - "type": "object", - "description": "A single encoding definition applied to a single schema property.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "contentType": { - "type": "string" - }, - "headers": { - "$ref": "#/definitions/headersOrReferences" - }, - "style": { - "type": "string" - }, - "explode": { - "type": "boolean" - }, - "allowReserved": { - "type": "boolean" - } - } - }, - "responses": { - "type": "object", - "description": "A container for the expected responses of an operation. The container maps a HTTP response code to the expected response. The documentation is not necessarily expected to cover all possible HTTP response codes because they may not be known in advance. However, documentation is expected to cover a successful operation response and any known errors. The `default` MAY be used as a default response object for all HTTP codes that are not covered individually by the specification. 
The `Responses Object` MUST contain at least one response code, and it SHOULD be the response for a successful operation call.", - "additionalProperties": false, - "patternProperties": { - "^([0-9X]{3})$": { - "$ref": "#/definitions/responseOrReference" - }, - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "default": { - "$ref": "#/definitions/responseOrReference" - } - } - }, - "response": { - "type": "object", - "description": "Describes a single response from an API Operation, including design-time, static `links` to operations based on the response.", - "required": [ - "description" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "description": { - "type": "string" - }, - "headers": { - "$ref": "#/definitions/headersOrReferences" - }, - "content": { - "$ref": "#/definitions/mediaTypes" - }, - "links": { - "$ref": "#/definitions/linksOrReferences" - } - } - }, - "callback": { - "type": "object", - "description": "A map of possible out-of band callbacks related to the parent operation. Each value in the map is a Path Item Object that describes a set of requests that may be initiated by the API provider and the expected responses. The key value used to identify the callback object is an expression, evaluated at runtime, that identifies a URL to use for the callback operation.", - "additionalProperties": false, - "patternProperties": { - "^": { - "$ref": "#/definitions/pathItem" - }, - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - } - }, - "example": { - "type": "object", - "description": "", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "summary": { - "type": "string" - }, - "description": { - "type": "string" - }, - "value": { - "$ref": "#/definitions/any" - }, - "externalValue": { - "type": "string" - } - } - }, - "link": { - "type": "object", - "description": "The `Link object` represents a possible design-time link for a response. The presence of a link does not guarantee the caller's ability to successfully invoke it, rather it provides a known relationship and traversal mechanism between responses and other operations. Unlike _dynamic_ links (i.e. links provided **in** the response payload), the OAS linking mechanism does not require link information in the runtime response. For computing links, and providing instructions to execute them, a runtime expression is used for accessing values in an operation and using them as parameters while invoking the linked operation.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "operationRef": { - "type": "string" - }, - "operationId": { - "type": "string" - }, - "parameters": { - "$ref": "#/definitions/anyOrExpression" - }, - "requestBody": { - "$ref": "#/definitions/anyOrExpression" - }, - "description": { - "type": "string" - }, - "server": { - "$ref": "#/definitions/server" - } - } - }, - "header": { - "type": "object", - "description": "The Header Object follows the structure of the Parameter Object with the following changes: 1. `name` MUST NOT be specified, it is given in the corresponding `headers` map. 1. `in` MUST NOT be specified, it is implicitly in `header`. 1. 
All traits that are affected by the location MUST be applicable to a location of `header` (for example, `style`).", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "description": { - "type": "string" - }, - "required": { - "type": "boolean" - }, - "deprecated": { - "type": "boolean" - }, - "allowEmptyValue": { - "type": "boolean" - }, - "style": { - "type": "string" - }, - "explode": { - "type": "boolean" - }, - "allowReserved": { - "type": "boolean" - }, - "schema": { - "$ref": "#/definitions/schemaOrReference" - }, - "example": { - "$ref": "#/definitions/any" - }, - "examples": { - "$ref": "#/definitions/examplesOrReferences" - }, - "content": { - "$ref": "#/definitions/mediaTypes" - } - } - }, - "tag": { - "type": "object", - "description": "Adds metadata to a single tag that is used by the Operation Object. It is not mandatory to have a Tag Object per tag defined in the Operation Object instances.", - "required": [ - "name" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "description": { - "type": "string" - }, - "externalDocs": { - "$ref": "#/definitions/externalDocs" - } - } - }, - "reference": { - "type": "object", - "description": "A simple object to allow referencing other components in the specification, internally and externally. The Reference Object is defined by JSON Reference and follows the same structure, behavior and rules. For this specification, reference resolution is accomplished as defined by the JSON Reference specification and not by the JSON Schema specification.", - "required": [ - "$ref" - ], - "additionalProperties": false, - "properties": { - "$ref": { - "type": "string" - }, - "summary": { - "type": "string" - }, - "description": { - "type": "string" - } - } - }, - "schema": { - "type": "object", - "description": "The Schema Object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. This object is an extended subset of the JSON Schema Specification Wright Draft 00. For more information about the properties, see JSON Schema Core and JSON Schema Validation. 
Unless stated otherwise, the property definitions follow the JSON Schema.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "nullable": { - "type": "boolean" - }, - "discriminator": { - "$ref": "#/definitions/discriminator" - }, - "readOnly": { - "type": "boolean" - }, - "writeOnly": { - "type": "boolean" - }, - "xml": { - "$ref": "#/definitions/xml" - }, - "externalDocs": { - "$ref": "#/definitions/externalDocs" - }, - "example": { - "$ref": "#/definitions/any" - }, - "deprecated": { - "type": "boolean" - }, - "title": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/title" - }, - "multipleOf": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" - }, - "maximum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" - }, - "exclusiveMaximum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" - }, - "minimum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" - }, - "exclusiveMinimum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" - }, - "maxLength": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/maxLength" - }, - "minLength": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/minLength" - }, - "pattern": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" - }, - "maxItems": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/maxItems" - }, - "minItems": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/minItems" - }, - "uniqueItems": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" - }, - "maxProperties": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/maxProperties" - }, - "minProperties": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/minProperties" - }, - "required": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/required" - }, - "enum": { - "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" - }, - "type": { - "type": "string" - }, - "allOf": { - "type": "array", - "items": { - "$ref": "#/definitions/schemaOrReference" - }, - "minItems": 1 - }, - "oneOf": { - "type": "array", - "items": { - "$ref": "#/definitions/schemaOrReference" - }, - "minItems": 1 - }, - "anyOf": { - "type": "array", - "items": { - "$ref": "#/definitions/schemaOrReference" - }, - "minItems": 1 - }, - "not": { - "$ref": "#/definitions/schema" - }, - "items": { - "anyOf": [ - { - "$ref": "#/definitions/schemaOrReference" - }, - { - "type": "array", - "items": { - "$ref": "#/definitions/schemaOrReference" - }, - "minItems": 1 - } - ] - }, - "properties": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/schemaOrReference" - } - }, - "additionalProperties": { - "oneOf": [ - { - "$ref": "#/definitions/schemaOrReference" - }, - { - "type": "boolean" - } - ] - }, - "default": { - "$ref": "#/definitions/defaultType" - }, - "description": { - "type": "string" - }, - "format": { - "type": "string" - } - } - }, - "discriminator": { - "type": "object", - "description": "When request bodies or response payloads may be one of a number of different schemas, a `discriminator` object can be used to aid in serialization, deserialization, and validation. 
The discriminator is a specific object in a schema which is used to inform the consumer of the specification of an alternative schema based on the value associated with it. When using the discriminator, _inline_ schemas will not be considered.", - "required": [ - "propertyName" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "propertyName": { - "type": "string" - }, - "mapping": { - "$ref": "#/definitions/strings" - } - } - }, - "xml": { - "type": "object", - "description": "A metadata object that allows for more fine-tuned XML model definitions. When using arrays, XML element names are *not* inferred (for singular/plural forms) and the `name` property SHOULD be used to add that information. See examples for expected behavior.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "name": { - "type": "string" - }, - "namespace": { - "type": "string" - }, - "prefix": { - "type": "string" - }, - "attribute": { - "type": "boolean" - }, - "wrapped": { - "type": "boolean" - } - } - }, - "securityScheme": { - "type": "object", - "description": "Defines a security scheme that can be used by the operations. Supported schemes are HTTP authentication, an API key (either as a header, a cookie parameter or as a query parameter), mutual TLS (use of a client certificate), OAuth2's common flows (implicit, password, application and access code) as defined in RFC6749, and OpenID Connect. Please note that currently (2019) the implicit flow is about to be deprecated OAuth 2.0 Security Best Current Practice. Recommended for most use case is Authorization Code Grant flow with PKCE.", - "required": [ - "type" - ], - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "type": { - "type": "string" - }, - "description": { - "type": "string" - }, - "name": { - "type": "string" - }, - "in": { - "type": "string" - }, - "scheme": { - "type": "string" - }, - "bearerFormat": { - "type": "string" - }, - "flows": { - "$ref": "#/definitions/oauthFlows" - }, - "openIdConnectUrl": { - "type": "string" - } - } - }, - "oauthFlows": { - "type": "object", - "description": "Allows configuration of the supported OAuth Flows.", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "implicit": { - "$ref": "#/definitions/oauthFlow" - }, - "password": { - "$ref": "#/definitions/oauthFlow" - }, - "clientCredentials": { - "$ref": "#/definitions/oauthFlow" - }, - "authorizationCode": { - "$ref": "#/definitions/oauthFlow" - } - } - }, - "oauthFlow": { - "type": "object", - "description": "Configuration details for a supported OAuth Flow", - "additionalProperties": false, - "patternProperties": { - "^x-": { - "$ref": "#/definitions/specificationExtension" - } - }, - "properties": { - "authorizationUrl": { - "type": "string" - }, - "tokenUrl": { - "type": "string" - }, - "refreshUrl": { - "type": "string" - }, - "scopes": { - "$ref": "#/definitions/strings" - } - } - }, - "securityRequirement": { - "type": "object", - "description": "Lists the required security schemes to execute this operation. The name used for each property MUST correspond to a security scheme declared in the Security Schemes under the Components Object. 
Security Requirement Objects that contain multiple schemes require that all schemes MUST be satisfied for a request to be authorized. This enables support for scenarios where multiple query parameters or HTTP headers are required to convey security information. When a list of Security Requirement Objects is defined on the OpenAPI Object or Operation Object, only one of the Security Requirement Objects in the list needs to be satisfied to authorize the request.", - "additionalProperties": { - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - } - }, - "anyOrExpression": { - "oneOf": [ - { - "$ref": "#/definitions/any" - }, - { - "$ref": "#/definitions/expression" - } - ] - }, - "callbackOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/callback" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "exampleOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/example" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "headerOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/header" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "linkOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/link" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "parameterOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/parameter" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "requestBodyOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/requestBody" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "responseOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/response" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "schemaOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/schema" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "securitySchemeOrReference": { - "oneOf": [ - { - "$ref": "#/definitions/securityScheme" - }, - { - "$ref": "#/definitions/reference" - } - ] - }, - "callbacksOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/callbackOrReference" - } - }, - "encodings": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/encoding" - } - }, - "examplesOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/exampleOrReference" - } - }, - "headersOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/headerOrReference" - } - }, - "linksOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/linkOrReference" - } - }, - "mediaTypes": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/mediaType" - } - }, - "parametersOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/parameterOrReference" - } - }, - "requestBodiesOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/requestBodyOrReference" - } - }, - "responsesOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/responseOrReference" - } - }, - "schemasOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/schemaOrReference" - } - }, - "securitySchemesOrReferences": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/securitySchemeOrReference" - } - }, - "serverVariables": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/serverVariable" - } - }, - "strings": { - "type": "object", - "additionalProperties": { - "type": 
"string" - } - }, - "object": { - "type": "object", - "additionalProperties": true - }, - "any": { - "additionalProperties": true - }, - "expression": { - "type": "object", - "additionalProperties": true - }, - "specificationExtension": { - "description": "Any property starting with x- is valid.", - "oneOf": [ - { - "type": "null" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "string" - }, - { - "type": "object" - }, - { - "type": "array" - } - ] - }, - "defaultType": { - "oneOf": [ - { - "type": "null" - }, - { - "type": "array" - }, - { - "type": "object" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "string" - } - ] - } - } -} diff --git a/vendor/github.com/kubernetes-csi/csi-lib-utils/protosanitizer/protosanitizer.go b/vendor/github.com/kubernetes-csi/csi-lib-utils/protosanitizer/protosanitizer.go index af64a7b2..cad31f17 100644 --- a/vendor/github.com/kubernetes-csi/csi-lib-utils/protosanitizer/protosanitizer.go +++ b/vendor/github.com/kubernetes-csi/csi-lib-utils/protosanitizer/protosanitizer.go @@ -26,7 +26,6 @@ import ( "github.com/golang/protobuf/descriptor" "github.com/golang/protobuf/proto" - protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor" protobufdescriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" ) @@ -56,7 +55,7 @@ func StripSecretsCSI03(msg interface{}) fmt.Stringer { type stripSecrets struct { msg interface{} - isSecretField func(field *protobuf.FieldDescriptorProto) bool + isSecretField func(field *protobufdescriptor.FieldDescriptorProto) bool } func (s *stripSecrets) String() string { @@ -110,7 +109,7 @@ func (s *stripSecrets) strip(parsed interface{}, msg interface{}) { if _, ok := parsedFields[field.GetName()]; ok { parsedFields[field.GetName()] = "***stripped***" } - } else if field.GetType() == protobuf.FieldDescriptorProto_TYPE_MESSAGE { + } else if field.GetType() == protobufdescriptor.FieldDescriptorProto_TYPE_MESSAGE { // When we get here, // the type name is something like ".csi.v1.CapacityRange" (leading dot!) // and looking up "csi.v1.CapacityRange" @@ -150,7 +149,7 @@ func (s *stripSecrets) strip(parsed interface{}, msg interface{}) { // isCSI1Secret uses the csi.E_CsiSecret extension from CSI 1.0 to // determine whether a field contains secrets. -func isCSI1Secret(field *protobuf.FieldDescriptorProto) bool { +func isCSI1Secret(field *protobufdescriptor.FieldDescriptorProto) bool { ex, err := proto.GetExtension(field.Options, e_CsiSecret) return err == nil && ex != nil && *ex.(*bool) } @@ -172,6 +171,6 @@ var e_CsiSecret = &proto.ExtensionDesc{ // isCSI03Secret relies on the naming convention in CSI <= 0.3 // to determine whether a field contains secrets. -func isCSI03Secret(field *protobuf.FieldDescriptorProto) bool { +func isCSI03Secret(field *protobufdescriptor.FieldDescriptorProto) bool { return strings.HasSuffix(field.GetName(), "_secrets") } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/collectors/go_collector_latest.go b/vendor/github.com/prometheus/client_golang/prometheus/collectors/go_collector_latest.go index 246c5ea9..2f561689 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/collectors/go_collector_latest.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/collectors/go_collector_latest.go @@ -28,6 +28,8 @@ var ( MetricsAll = GoRuntimeMetricsRule{regexp.MustCompile("/.*")} // MetricsGC allows only GC metrics to be collected from Go runtime. // e.g. 
go_gc_cycles_automatic_gc_cycles_total + // NOTE: This does not include new class of "/cpu/classes/gc/..." metrics. + // Use custom metric rule to access those. MetricsGC = GoRuntimeMetricsRule{regexp.MustCompile(`^/gc/.*`)} // MetricsMemory allows only memory metrics to be collected from Go runtime. // e.g. go_memory_classes_heap_free_bytes diff --git a/vendor/github.com/prometheus/client_golang/prometheus/counter.go b/vendor/github.com/prometheus/client_golang/prometheus/counter.go index a912b75a..62de4dc5 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/counter.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/counter.go @@ -59,6 +59,18 @@ type ExemplarAdder interface { // CounterOpts is an alias for Opts. See there for doc comments. type CounterOpts Opts +// CounterVecOpts bundles the options to create a CounterVec metric. +// It is mandatory to set CounterOpts, see there for mandatory fields. VariableLabels +// is optional and can safely be left to its default value. +type CounterVecOpts struct { + CounterOpts + + // VariableLabels are used to partition the metric vector by the given set + // of labels. Each label value will be constrained with the optional Contraint + // function, if provided. + VariableLabels ConstrainableLabels +} + // NewCounter creates a new Counter based on the provided CounterOpts. // // The returned implementation also implements ExemplarAdder. It is safe to @@ -174,16 +186,24 @@ type CounterVec struct { // NewCounterVec creates a new CounterVec based on the provided CounterOpts and // partitioned by the given label names. func NewCounterVec(opts CounterOpts, labelNames []string) *CounterVec { - desc := NewDesc( + return V2.NewCounterVec(CounterVecOpts{ + CounterOpts: opts, + VariableLabels: UnconstrainedLabels(labelNames), + }) +} + +// NewCounterVec creates a new CounterVec based on the provided CounterVecOpts. +func (v2) NewCounterVec(opts CounterVecOpts) *CounterVec { + desc := V2.NewDesc( BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), opts.Help, - labelNames, + opts.VariableLabels, opts.ConstLabels, ) return &CounterVec{ MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { if len(lvs) != len(desc.variableLabels) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels, lvs)) + panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.labelNames(), lvs)) } result := &counter{desc: desc, labelPairs: MakeLabelPairs(desc, lvs), now: time.Now} result.init(result) // Init self-collection. diff --git a/vendor/github.com/prometheus/client_golang/prometheus/desc.go b/vendor/github.com/prometheus/client_golang/prometheus/desc.go index 8bc5e44e..deedc2df 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/desc.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/desc.go @@ -14,20 +14,16 @@ package prometheus import ( - "errors" "fmt" "sort" "strings" "github.com/cespare/xxhash/v2" - - "github.com/prometheus/client_golang/prometheus/internal" - - //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. - "github.com/golang/protobuf/proto" + dto "github.com/prometheus/client_model/go" "github.com/prometheus/common/model" + "google.golang.org/protobuf/proto" - dto "github.com/prometheus/client_model/go" + "github.com/prometheus/client_golang/prometheus/internal" ) // Desc is the descriptor used by every Prometheus Metric. 
It is essentially @@ -54,9 +50,9 @@ type Desc struct { // constLabelPairs contains precalculated DTO label pairs based on // the constant labels. constLabelPairs []*dto.LabelPair - // variableLabels contains names of labels for which the metric - // maintains variable values. - variableLabels []string + // variableLabels contains names of labels and normalization function for + // which the metric maintains variable values. + variableLabels ConstrainedLabels // id is a hash of the values of the ConstLabels and fqName. This // must be unique among all registered descriptors and can therefore be // used as an identifier of the descriptor. @@ -80,10 +76,24 @@ type Desc struct { // For constLabels, the label values are constant. Therefore, they are fully // specified in the Desc. See the Collector example for a usage pattern. func NewDesc(fqName, help string, variableLabels []string, constLabels Labels) *Desc { + return V2.NewDesc(fqName, help, UnconstrainedLabels(variableLabels), constLabels) +} + +// NewDesc allocates and initializes a new Desc. Errors are recorded in the Desc +// and will be reported on registration time. variableLabels and constLabels can +// be nil if no such labels should be set. fqName must not be empty. +// +// variableLabels only contain the label names and normalization functions. Their +// label values are variable and therefore not part of the Desc. (They are managed +// within the Metric.) +// +// For constLabels, the label values are constant. Therefore, they are fully +// specified in the Desc. See the Collector example for a usage pattern. +func (v2) NewDesc(fqName, help string, variableLabels ConstrainableLabels, constLabels Labels) *Desc { d := &Desc{ fqName: fqName, help: help, - variableLabels: variableLabels, + variableLabels: variableLabels.constrainedLabels(), } if !model.IsValidMetricName(model.LabelValue(fqName)) { d.err = fmt.Errorf("%q is not a valid metric name", fqName) @@ -93,7 +103,7 @@ func NewDesc(fqName, help string, variableLabels []string, constLabels Labels) * // their sorted label names) plus the fqName (at position 0). labelValues := make([]string, 1, len(constLabels)+1) labelValues[0] = fqName - labelNames := make([]string, 0, len(constLabels)+len(variableLabels)) + labelNames := make([]string, 0, len(constLabels)+len(d.variableLabels)) labelNameSet := map[string]struct{}{} // First add only the const label names and sort them... for labelName := range constLabels { @@ -118,16 +128,16 @@ func NewDesc(fqName, help string, variableLabels []string, constLabels Labels) * // Now add the variable label names, but prefix them with something that // cannot be in a regular label name. That prevents matching the label // dimension with a different mix between preset and variable labels. 
- for _, labelName := range variableLabels { - if !checkLabelName(labelName) { - d.err = fmt.Errorf("%q is not a valid label name for metric %q", labelName, fqName) + for _, label := range d.variableLabels { + if !checkLabelName(label.Name) { + d.err = fmt.Errorf("%q is not a valid label name for metric %q", label.Name, fqName) return d } - labelNames = append(labelNames, "$"+labelName) - labelNameSet[labelName] = struct{}{} + labelNames = append(labelNames, "$"+label.Name) + labelNameSet[label.Name] = struct{}{} } if len(labelNames) != len(labelNameSet) { - d.err = errors.New("duplicate label names") + d.err = fmt.Errorf("duplicate label names in constant and variable labels for metric %q", fqName) return d } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/doc.go b/vendor/github.com/prometheus/client_golang/prometheus/doc.go index 811072cb..962608f0 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/doc.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/doc.go @@ -37,35 +37,35 @@ // // type metrics struct { // cpuTemp prometheus.Gauge -// hdFailures *prometheus.CounterVec +// hdFailures *prometheus.CounterVec // } // // func NewMetrics(reg prometheus.Registerer) *metrics { -// m := &metrics{ -// cpuTemp: prometheus.NewGauge(prometheus.GaugeOpts{ -// Name: "cpu_temperature_celsius", -// Help: "Current temperature of the CPU.", -// }), -// hdFailures: prometheus.NewCounterVec( -// prometheus.CounterOpts{ -// Name: "hd_errors_total", -// Help: "Number of hard-disk errors.", -// }, -// []string{"device"}, -// ), -// } -// reg.MustRegister(m.cpuTemp) -// reg.MustRegister(m.hdFailures) -// return m +// m := &metrics{ +// cpuTemp: prometheus.NewGauge(prometheus.GaugeOpts{ +// Name: "cpu_temperature_celsius", +// Help: "Current temperature of the CPU.", +// }), +// hdFailures: prometheus.NewCounterVec( +// prometheus.CounterOpts{ +// Name: "hd_errors_total", +// Help: "Number of hard-disk errors.", +// }, +// []string{"device"}, +// ), +// } +// reg.MustRegister(m.cpuTemp) +// reg.MustRegister(m.hdFailures) +// return m // } // // func main() { -// // Create a non-global registry. -// reg := prometheus.NewRegistry() +// // Create a non-global registry. +// reg := prometheus.NewRegistry() // -// // Create new metrics and register them using the custom registry. -// m := NewMetrics(reg) -// // Set values for the new created metrics. +// // Create new metrics and register them using the custom registry. +// m := NewMetrics(reg) +// // Set values for the new created metrics. // m.cpuTemp.Set(65.3) // m.hdFailures.With(prometheus.Labels{"device":"/dev/sda"}).Inc() // diff --git a/vendor/github.com/prometheus/client_golang/prometheus/gauge.go b/vendor/github.com/prometheus/client_golang/prometheus/gauge.go index 21271a5b..f1ea6c76 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/gauge.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/gauge.go @@ -55,6 +55,18 @@ type Gauge interface { // GaugeOpts is an alias for Opts. See there for doc comments. type GaugeOpts Opts +// GaugeVecOpts bundles the options to create a GaugeVec metric. +// It is mandatory to set GaugeOpts, see there for mandatory fields. VariableLabels +// is optional and can safely be left to its default value. +type GaugeVecOpts struct { + GaugeOpts + + // VariableLabels are used to partition the metric vector by the given set + // of labels. Each label value will be constrained with the optional Contraint + // function, if provided. 
+ VariableLabels ConstrainableLabels +} + // NewGauge creates a new Gauge based on the provided GaugeOpts. // // The returned implementation is optimized for a fast Set method. If you have a @@ -138,16 +150,24 @@ type GaugeVec struct { // NewGaugeVec creates a new GaugeVec based on the provided GaugeOpts and // partitioned by the given label names. func NewGaugeVec(opts GaugeOpts, labelNames []string) *GaugeVec { - desc := NewDesc( + return V2.NewGaugeVec(GaugeVecOpts{ + GaugeOpts: opts, + VariableLabels: UnconstrainedLabels(labelNames), + }) +} + +// NewGaugeVec creates a new GaugeVec based on the provided GaugeVecOpts. +func (v2) NewGaugeVec(opts GaugeVecOpts) *GaugeVec { + desc := V2.NewDesc( BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), opts.Help, - labelNames, + opts.VariableLabels, opts.ConstLabels, ) return &GaugeVec{ MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { if len(lvs) != len(desc.variableLabels) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels, lvs)) + panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.labelNames(), lvs)) } result := &gauge{desc: desc, labelPairs: MakeLabelPairs(desc, lvs)} result.init(result) // Init self-collection. diff --git a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_latest.go b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_latest.go index 3a2d55e8..2d8d9f64 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_latest.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_latest.go @@ -23,11 +23,10 @@ import ( "strings" "sync" - //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. - "github.com/golang/protobuf/proto" - dto "github.com/prometheus/client_model/go" - "github.com/prometheus/client_golang/prometheus/internal" + + dto "github.com/prometheus/client_model/go" + "google.golang.org/protobuf/proto" ) const ( diff --git a/vendor/github.com/prometheus/client_golang/prometheus/histogram.go b/vendor/github.com/prometheus/client_golang/prometheus/histogram.go index 4c873a01..8d818afe 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/histogram.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/histogram.go @@ -22,10 +22,9 @@ import ( "sync/atomic" "time" - //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. - "github.com/golang/protobuf/proto" - dto "github.com/prometheus/client_model/go" + + "google.golang.org/protobuf/proto" ) // nativeHistogramBounds for the frac of observed values. Only relevant for @@ -402,7 +401,7 @@ type HistogramOpts struct { // Histogram by a Prometheus server with that feature enabled (requires // Prometheus v2.40+). Sparse buckets are exponential buckets covering // the whole float64 range (with the exception of the “zero” bucket, see - // SparseBucketsZeroThreshold below). From any one bucket to the next, + // NativeHistogramZeroThreshold below). From any one bucket to the next, // the width of the bucket grows by a constant // factor. NativeHistogramBucketFactor provides an upper bound for this // factor (exception see below). The smaller @@ -433,7 +432,7 @@ type HistogramOpts struct { // bucket. For best results, this should be close to a bucket // boundary. This is usually the case if picking a power of two. If // NativeHistogramZeroThreshold is left at zero, - // DefSparseBucketsZeroThreshold is used as the threshold. 
To configure + // DefNativeHistogramZeroThreshold is used as the threshold. To configure // a zero bucket with an actual threshold of zero (i.e. only // observations of precisely zero will go into the zero bucket), set // NativeHistogramZeroThreshold to the NativeHistogramZeroThresholdZero @@ -469,6 +468,18 @@ type HistogramOpts struct { NativeHistogramMaxZeroThreshold float64 } +// HistogramVecOpts bundles the options to create a HistogramVec metric. +// It is mandatory to set HistogramOpts, see there for mandatory fields. VariableLabels +// is optional and can safely be left to its default value. +type HistogramVecOpts struct { + HistogramOpts + + // VariableLabels are used to partition the metric vector by the given set + // of labels. Each label value will be constrained with the optional Contraint + // function, if provided. + VariableLabels ConstrainableLabels +} + // NewHistogram creates a new Histogram based on the provided HistogramOpts. It // panics if the buckets in HistogramOpts are not in strictly increasing order. // @@ -489,11 +500,11 @@ func NewHistogram(opts HistogramOpts) Histogram { func newHistogram(desc *Desc, opts HistogramOpts, labelValues ...string) Histogram { if len(desc.variableLabels) != len(labelValues) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels, labelValues)) + panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.labelNames(), labelValues)) } for _, n := range desc.variableLabels { - if n == bucketLabel { + if n.Name == bucketLabel { panic(errBucketLabelNotAllowed) } } @@ -544,16 +555,12 @@ func newHistogram(desc *Desc, opts HistogramOpts, labelValues ...string) Histogr } // Finally we know the final length of h.upperBounds and can make buckets // for both counts as well as exemplars: - h.counts[0] = &histogramCounts{ - buckets: make([]uint64, len(h.upperBounds)), - nativeHistogramZeroThresholdBits: math.Float64bits(h.nativeHistogramZeroThreshold), - nativeHistogramSchema: h.nativeHistogramSchema, - } - h.counts[1] = &histogramCounts{ - buckets: make([]uint64, len(h.upperBounds)), - nativeHistogramZeroThresholdBits: math.Float64bits(h.nativeHistogramZeroThreshold), - nativeHistogramSchema: h.nativeHistogramSchema, - } + h.counts[0] = &histogramCounts{buckets: make([]uint64, len(h.upperBounds))} + atomic.StoreUint64(&h.counts[0].nativeHistogramZeroThresholdBits, math.Float64bits(h.nativeHistogramZeroThreshold)) + atomic.StoreInt32(&h.counts[0].nativeHistogramSchema, h.nativeHistogramSchema) + h.counts[1] = &histogramCounts{buckets: make([]uint64, len(h.upperBounds))} + atomic.StoreUint64(&h.counts[1].nativeHistogramZeroThresholdBits, math.Float64bits(h.nativeHistogramZeroThreshold)) + atomic.StoreInt32(&h.counts[1].nativeHistogramSchema, h.nativeHistogramSchema) h.exemplars = make([]atomic.Value, len(h.upperBounds)+1) h.init(h) // Init self-collection. @@ -632,8 +639,8 @@ func (hc *histogramCounts) observe(v float64, bucket int, doSparse bool) { if frac == 0.5 { key-- } - div := 1 << -schema - key = (key + div - 1) / div + offset := (1 << -schema) - 1 + key = (key + offset) >> -schema } if isInf { key++ @@ -810,7 +817,7 @@ func (h *histogram) observe(v float64, bucket int) { } } -// limitSparsebuckets applies a strategy to limit the number of populated sparse +// limitBuckets applies a strategy to limit the number of populated sparse // buckets. 
It's generally best effort, and there are situations where the // number can go higher (if even the lowest resolution isn't enough to reduce // the number sufficiently, or if the provided counts aren't fully updated yet @@ -1034,15 +1041,23 @@ type HistogramVec struct { // NewHistogramVec creates a new HistogramVec based on the provided HistogramOpts and // partitioned by the given label names. func NewHistogramVec(opts HistogramOpts, labelNames []string) *HistogramVec { - desc := NewDesc( + return V2.NewHistogramVec(HistogramVecOpts{ + HistogramOpts: opts, + VariableLabels: UnconstrainedLabels(labelNames), + }) +} + +// NewHistogramVec creates a new HistogramVec based on the provided HistogramVecOpts. +func (v2) NewHistogramVec(opts HistogramVecOpts) *HistogramVec { + desc := V2.NewDesc( BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), opts.Help, - labelNames, + opts.VariableLabels, opts.ConstLabels, ) return &HistogramVec{ MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { - return newHistogram(desc, opts, lvs...) + return newHistogram(desc, opts.HistogramOpts, lvs...) }), } } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/labels.go b/vendor/github.com/prometheus/client_golang/prometheus/labels.go index c1b8fad3..63ff8683 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/labels.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/labels.go @@ -32,6 +32,78 @@ import ( // create a Desc. type Labels map[string]string +// ConstrainedLabels represents a label name and its constrain function +// to normalize label values. This type is commonly used when constructing +// metric vector Collectors. +type ConstrainedLabel struct { + Name string + Constraint func(string) string +} + +func (cl ConstrainedLabel) Constrain(v string) string { + if cl.Constraint == nil { + return v + } + return cl.Constraint(v) +} + +// ConstrainableLabels is an interface that allows creating of labels that can +// be optionally constrained. +// +// prometheus.V2().NewCounterVec(CounterVecOpts{ +// CounterOpts: {...}, // Usual CounterOpts fields +// VariableLabels: []ConstrainedLabels{ +// {Name: "A"}, +// {Name: "B", Constraint: func(v string) string { ... }}, +// }, +// }) +type ConstrainableLabels interface { + constrainedLabels() ConstrainedLabels + labelNames() []string +} + +// ConstrainedLabels represents a collection of label name -> constrain function +// to normalize label values. This type is commonly used when constructing +// metric vector Collectors. +type ConstrainedLabels []ConstrainedLabel + +func (cls ConstrainedLabels) constrainedLabels() ConstrainedLabels { + return cls +} + +func (cls ConstrainedLabels) labelNames() []string { + names := make([]string, len(cls)) + for i, label := range cls { + names[i] = label.Name + } + return names +} + +// UnconstrainedLabels represents collection of label without any constraint on +// their value. Thus, it is simply a collection of label names. 
+// +// UnconstrainedLabels([]string{ "A", "B" }) +// +// is equivalent to +// +// ConstrainedLabels { +// { Name: "A" }, +// { Name: "B" }, +// } +type UnconstrainedLabels []string + +func (uls UnconstrainedLabels) constrainedLabels() ConstrainedLabels { + constrainedLabels := make([]ConstrainedLabel, len(uls)) + for i, l := range uls { + constrainedLabels[i] = ConstrainedLabel{Name: l} + } + return constrainedLabels +} + +func (uls UnconstrainedLabels) labelNames() []string { + return uls +} + // reservedLabelPrefix is a prefix which is not legal in user-supplied // label names. const reservedLabelPrefix = "__" diff --git a/vendor/github.com/prometheus/client_golang/prometheus/metric.go b/vendor/github.com/prometheus/client_golang/prometheus/metric.go index b5119c50..07bbc9d7 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/metric.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/metric.go @@ -20,11 +20,9 @@ import ( "strings" "time" - //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. - "github.com/golang/protobuf/proto" - "github.com/prometheus/common/model" - dto "github.com/prometheus/client_model/go" + "github.com/prometheus/common/model" + "google.golang.org/protobuf/proto" ) var separatorByteSlice = []byte{model.SeparatorByte} // For convenient use with xxhash. diff --git a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/http.go b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/http.go index a4cc9810..09b8d2fb 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/http.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/http.go @@ -37,6 +37,7 @@ import ( "fmt" "io" "net/http" + "strconv" "strings" "sync" "time" @@ -47,9 +48,10 @@ import ( ) const ( - contentTypeHeader = "Content-Type" - contentEncodingHeader = "Content-Encoding" - acceptEncodingHeader = "Accept-Encoding" + contentTypeHeader = "Content-Type" + contentEncodingHeader = "Content-Encoding" + acceptEncodingHeader = "Accept-Encoding" + processStartTimeHeader = "Process-Start-Time-Unix" ) var gzipPool = sync.Pool{ @@ -121,6 +123,9 @@ func HandlerForTransactional(reg prometheus.TransactionalGatherer, opts HandlerO } h := http.HandlerFunc(func(rsp http.ResponseWriter, req *http.Request) { + if !opts.ProcessStartTime.IsZero() { + rsp.Header().Set(processStartTimeHeader, strconv.FormatInt(opts.ProcessStartTime.Unix(), 10)) + } if inFlightSem != nil { select { case inFlightSem <- struct{}{}: // All good, carry on. @@ -366,6 +371,14 @@ type HandlerOpts struct { // (which changes the identity of the resulting series on the Prometheus // server). EnableOpenMetrics bool + // ProcessStartTime allows setting process start timevalue that will be exposed + // with "Process-Start-Time-Unix" response header along with the metrics + // payload. This allow callers to have efficient transformations to cumulative + // counters (e.g. OpenTelemetry) or generally _created timestamp estimation per + // scrape target. + // NOTE: This feature is experimental and not covered by OpenMetrics or Prometheus + // exposition format. + ProcessStartTime time.Time } // gzipAccepted returns whether the client will accept gzip-encoded content. 
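The counter.go, gauge.go, histogram.go and labels.go hunks above introduce the experimental V2 constructors, which take ConstrainableLabels instead of a plain []string so that individual label values can be normalized before they are stored. A minimal usage sketch, assuming the client_golang v1.15-era API these vendored hunks correspond to; the metric name and the strings.ToLower constraint are illustrative only and not part of this patch:

package main

import (
	"strings"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// V2.NewCounterVec takes CounterVecOpts; each ConstrainedLabel may carry
	// an optional Constraint that normalizes the label value before storage.
	requests := prometheus.V2.NewCounterVec(prometheus.CounterVecOpts{
		CounterOpts: prometheus.CounterOpts{
			Name: "example_http_requests_total", // hypothetical name
			Help: "Requests partitioned by status code and (lower-cased) method.",
		},
		VariableLabels: prometheus.ConstrainedLabels{
			{Name: "code"},
			{Name: "method", Constraint: strings.ToLower},
		},
	})
	prometheus.MustRegister(requests)

	// Both calls land on the same child: the constraint folds "GET" to "get".
	requests.WithLabelValues("200", "GET").Inc()
	requests.WithLabelValues("200", "get").Inc()
}

UnconstrainedLabels([]string{...}) remains available for the common case where no normalization is needed, which is exactly what the unchanged NewCounterVec([]string) wrapper now builds under the hood.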
diff --git a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_client.go b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_client.go index 21086781..d3482c40 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_client.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_client.go @@ -68,16 +68,17 @@ func InstrumentRoundTripperCounter(counter *prometheus.CounterVec, next http.Rou o.apply(rtOpts) } - code, method := checkLabels(counter) + // Curry the counter with dynamic labels before checking the remaining labels. + code, method := checkLabels(counter.MustCurryWith(rtOpts.emptyDynamicLabels())) return func(r *http.Request) (*http.Response, error) { resp, err := next.RoundTrip(r) if err == nil { - addWithExemplar( - counter.With(labels(code, method, r.Method, resp.StatusCode, rtOpts.extraMethods...)), - 1, - rtOpts.getExemplarFn(r.Context()), - ) + l := labels(code, method, r.Method, resp.StatusCode, rtOpts.extraMethods...) + for label, resolve := range rtOpts.extraLabelsFromCtx { + l[label] = resolve(resp.Request.Context()) + } + addWithExemplar(counter.With(l), 1, rtOpts.getExemplarFn(r.Context())) } return resp, err } @@ -110,17 +111,18 @@ func InstrumentRoundTripperDuration(obs prometheus.ObserverVec, next http.RoundT o.apply(rtOpts) } - code, method := checkLabels(obs) + // Curry the observer with dynamic labels before checking the remaining labels. + code, method := checkLabels(obs.MustCurryWith(rtOpts.emptyDynamicLabels())) return func(r *http.Request) (*http.Response, error) { start := time.Now() resp, err := next.RoundTrip(r) if err == nil { - observeWithExemplar( - obs.With(labels(code, method, r.Method, resp.StatusCode, rtOpts.extraMethods...)), - time.Since(start).Seconds(), - rtOpts.getExemplarFn(r.Context()), - ) + l := labels(code, method, r.Method, resp.StatusCode, rtOpts.extraMethods...) + for label, resolve := range rtOpts.extraLabelsFromCtx { + l[label] = resolve(resp.Request.Context()) + } + observeWithExemplar(obs.With(l), time.Since(start).Seconds(), rtOpts.getExemplarFn(r.Context())) } return resp, err } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go index cca67a78..3793036a 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/instrument_server.go @@ -87,7 +87,8 @@ func InstrumentHandlerDuration(obs prometheus.ObserverVec, next http.Handler, op o.apply(hOpts) } - code, method := checkLabels(obs) + // Curry the observer with dynamic labels before checking the remaining labels. + code, method := checkLabels(obs.MustCurryWith(hOpts.emptyDynamicLabels())) if code { return func(w http.ResponseWriter, r *http.Request) { @@ -95,23 +96,22 @@ func InstrumentHandlerDuration(obs prometheus.ObserverVec, next http.Handler, op d := newDelegator(w, nil) next.ServeHTTP(d, r) - observeWithExemplar( - obs.With(labels(code, method, r.Method, d.Status(), hOpts.extraMethods...)), - time.Since(now).Seconds(), - hOpts.getExemplarFn(r.Context()), - ) + l := labels(code, method, r.Method, d.Status(), hOpts.extraMethods...) 
+ for label, resolve := range hOpts.extraLabelsFromCtx { + l[label] = resolve(r.Context()) + } + observeWithExemplar(obs.With(l), time.Since(now).Seconds(), hOpts.getExemplarFn(r.Context())) } } return func(w http.ResponseWriter, r *http.Request) { now := time.Now() next.ServeHTTP(w, r) - - observeWithExemplar( - obs.With(labels(code, method, r.Method, 0, hOpts.extraMethods...)), - time.Since(now).Seconds(), - hOpts.getExemplarFn(r.Context()), - ) + l := labels(code, method, r.Method, 0, hOpts.extraMethods...) + for label, resolve := range hOpts.extraLabelsFromCtx { + l[label] = resolve(r.Context()) + } + observeWithExemplar(obs.With(l), time.Since(now).Seconds(), hOpts.getExemplarFn(r.Context())) } } @@ -138,28 +138,30 @@ func InstrumentHandlerCounter(counter *prometheus.CounterVec, next http.Handler, o.apply(hOpts) } - code, method := checkLabels(counter) + // Curry the counter with dynamic labels before checking the remaining labels. + code, method := checkLabels(counter.MustCurryWith(hOpts.emptyDynamicLabels())) if code { return func(w http.ResponseWriter, r *http.Request) { d := newDelegator(w, nil) next.ServeHTTP(d, r) - addWithExemplar( - counter.With(labels(code, method, r.Method, d.Status(), hOpts.extraMethods...)), - 1, - hOpts.getExemplarFn(r.Context()), - ) + l := labels(code, method, r.Method, d.Status(), hOpts.extraMethods...) + for label, resolve := range hOpts.extraLabelsFromCtx { + l[label] = resolve(r.Context()) + } + addWithExemplar(counter.With(l), 1, hOpts.getExemplarFn(r.Context())) } } return func(w http.ResponseWriter, r *http.Request) { next.ServeHTTP(w, r) - addWithExemplar( - counter.With(labels(code, method, r.Method, 0, hOpts.extraMethods...)), - 1, - hOpts.getExemplarFn(r.Context()), - ) + + l := labels(code, method, r.Method, 0, hOpts.extraMethods...) + for label, resolve := range hOpts.extraLabelsFromCtx { + l[label] = resolve(r.Context()) + } + addWithExemplar(counter.With(l), 1, hOpts.getExemplarFn(r.Context())) } } @@ -191,16 +193,17 @@ func InstrumentHandlerTimeToWriteHeader(obs prometheus.ObserverVec, next http.Ha o.apply(hOpts) } - code, method := checkLabels(obs) + // Curry the observer with dynamic labels before checking the remaining labels. + code, method := checkLabels(obs.MustCurryWith(hOpts.emptyDynamicLabels())) return func(w http.ResponseWriter, r *http.Request) { now := time.Now() d := newDelegator(w, func(status int) { - observeWithExemplar( - obs.With(labels(code, method, r.Method, status, hOpts.extraMethods...)), - time.Since(now).Seconds(), - hOpts.getExemplarFn(r.Context()), - ) + l := labels(code, method, r.Method, status, hOpts.extraMethods...) + for label, resolve := range hOpts.extraLabelsFromCtx { + l[label] = resolve(r.Context()) + } + observeWithExemplar(obs.With(l), time.Since(now).Seconds(), hOpts.getExemplarFn(r.Context())) }) next.ServeHTTP(d, r) } @@ -231,28 +234,32 @@ func InstrumentHandlerRequestSize(obs prometheus.ObserverVec, next http.Handler, o.apply(hOpts) } - code, method := checkLabels(obs) + // Curry the observer with dynamic labels before checking the remaining labels. 
+ code, method := checkLabels(obs.MustCurryWith(hOpts.emptyDynamicLabels())) + if code { return func(w http.ResponseWriter, r *http.Request) { d := newDelegator(w, nil) next.ServeHTTP(d, r) size := computeApproximateRequestSize(r) - observeWithExemplar( - obs.With(labels(code, method, r.Method, d.Status(), hOpts.extraMethods...)), - float64(size), - hOpts.getExemplarFn(r.Context()), - ) + + l := labels(code, method, r.Method, d.Status(), hOpts.extraMethods...) + for label, resolve := range hOpts.extraLabelsFromCtx { + l[label] = resolve(r.Context()) + } + observeWithExemplar(obs.With(l), float64(size), hOpts.getExemplarFn(r.Context())) } } return func(w http.ResponseWriter, r *http.Request) { next.ServeHTTP(w, r) size := computeApproximateRequestSize(r) - observeWithExemplar( - obs.With(labels(code, method, r.Method, 0, hOpts.extraMethods...)), - float64(size), - hOpts.getExemplarFn(r.Context()), - ) + + l := labels(code, method, r.Method, 0, hOpts.extraMethods...) + for label, resolve := range hOpts.extraLabelsFromCtx { + l[label] = resolve(r.Context()) + } + observeWithExemplar(obs.With(l), float64(size), hOpts.getExemplarFn(r.Context())) } } @@ -281,16 +288,18 @@ func InstrumentHandlerResponseSize(obs prometheus.ObserverVec, next http.Handler o.apply(hOpts) } - code, method := checkLabels(obs) + // Curry the observer with dynamic labels before checking the remaining labels. + code, method := checkLabels(obs.MustCurryWith(hOpts.emptyDynamicLabels())) return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { d := newDelegator(w, nil) next.ServeHTTP(d, r) - observeWithExemplar( - obs.With(labels(code, method, r.Method, d.Status(), hOpts.extraMethods...)), - float64(d.Written()), - hOpts.getExemplarFn(r.Context()), - ) + + l := labels(code, method, r.Method, d.Status(), hOpts.extraMethods...) + for label, resolve := range hOpts.extraLabelsFromCtx { + l[label] = resolve(r.Context()) + } + observeWithExemplar(obs.With(l), float64(d.Written()), hOpts.getExemplarFn(r.Context())) }) } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/option.go b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/option.go index c590d912..5d4383aa 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/promhttp/option.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/promhttp/option.go @@ -24,14 +24,32 @@ type Option interface { apply(*options) } +// LabelValueFromCtx are used to compute the label value from request context. +// Context can be filled with values from request through middleware. +type LabelValueFromCtx func(ctx context.Context) string + // options store options for both a handler or round tripper. 
type options struct { - extraMethods []string - getExemplarFn func(requestCtx context.Context) prometheus.Labels + extraMethods []string + getExemplarFn func(requestCtx context.Context) prometheus.Labels + extraLabelsFromCtx map[string]LabelValueFromCtx } func defaultOptions() *options { - return &options{getExemplarFn: func(ctx context.Context) prometheus.Labels { return nil }} + return &options{ + getExemplarFn: func(ctx context.Context) prometheus.Labels { return nil }, + extraLabelsFromCtx: map[string]LabelValueFromCtx{}, + } +} + +func (o *options) emptyDynamicLabels() prometheus.Labels { + labels := prometheus.Labels{} + + for label := range o.extraLabelsFromCtx { + labels[label] = "" + } + + return labels } type optionApplyFunc func(*options) @@ -48,11 +66,19 @@ func WithExtraMethods(methods ...string) Option { }) } -// WithExemplarFromContext adds allows to put a hook to all counter and histogram metrics. -// If the hook function returns non-nil labels, exemplars will be added for that request, otherwise metric -// will get instrumented without exemplar. +// WithExemplarFromContext allows to inject function that will get exemplar from context that will be put to counter and histogram metrics. +// If the function returns nil labels or the metric does not support exemplars, no exemplar will be added (noop), but +// metric will continue to observe/increment. func WithExemplarFromContext(getExemplarFn func(requestCtx context.Context) prometheus.Labels) Option { return optionApplyFunc(func(o *options) { o.getExemplarFn = getExemplarFn }) } + +// WithLabelFromCtx registers a label for dynamic resolution with access to context. +// See the example for ExampleInstrumentHandlerWithLabelResolver for example usage +func WithLabelFromCtx(name string, valueFn LabelValueFromCtx) Option { + return optionApplyFunc(func(o *options) { + o.extraLabelsFromCtx[name] = valueFn + }) +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/registry.go b/vendor/github.com/prometheus/client_golang/prometheus/registry.go index 09e34d30..44da9433 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/registry.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/registry.go @@ -21,18 +21,17 @@ import ( "path/filepath" "runtime" "sort" + "strconv" "strings" "sync" "unicode/utf8" - "github.com/cespare/xxhash/v2" - //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. 
- "github.com/golang/protobuf/proto" - "github.com/prometheus/common/expfmt" + "github.com/prometheus/client_golang/prometheus/internal" + "github.com/cespare/xxhash/v2" dto "github.com/prometheus/client_model/go" - - "github.com/prometheus/client_golang/prometheus/internal" + "github.com/prometheus/common/expfmt" + "google.golang.org/protobuf/proto" ) const ( @@ -933,6 +932,10 @@ func checkMetricConsistency( h.WriteString(lp.GetValue()) h.Write(separatorByteSlice) } + if dtoMetric.TimestampMs != nil { + h.WriteString(strconv.FormatInt(*(dtoMetric.TimestampMs), 10)) + h.Write(separatorByteSlice) + } hSum := h.Sum64() if _, exists := metricHashes[hSum]; exists { return fmt.Errorf( @@ -962,7 +965,7 @@ func checkDescConsistency( copy(lpsFromDesc, desc.constLabelPairs) for _, l := range desc.variableLabels { lpsFromDesc = append(lpsFromDesc, &dto.LabelPair{ - Name: proto.String(l), + Name: proto.String(l.Name), }) } if len(lpsFromDesc) != len(dtoMetric.Label) { diff --git a/vendor/github.com/prometheus/client_golang/prometheus/summary.go b/vendor/github.com/prometheus/client_golang/prometheus/summary.go index 7bc448a8..dd359264 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/summary.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/summary.go @@ -22,11 +22,10 @@ import ( "sync/atomic" "time" - "github.com/beorn7/perks/quantile" - //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. - "github.com/golang/protobuf/proto" - dto "github.com/prometheus/client_model/go" + + "github.com/beorn7/perks/quantile" + "google.golang.org/protobuf/proto" ) // quantileLabel is used for the label that defines the quantile in a @@ -148,6 +147,18 @@ type SummaryOpts struct { BufCap uint32 } +// SummaryVecOpts bundles the options to create a SummaryVec metric. +// It is mandatory to set SummaryOpts, see there for mandatory fields. VariableLabels +// is optional and can safely be left to its default value. +type SummaryVecOpts struct { + SummaryOpts + + // VariableLabels are used to partition the metric vector by the given set + // of labels. Each label value will be constrained with the optional Contraint + // function, if provided. + VariableLabels ConstrainableLabels +} + // Problem with the sliding-window decay algorithm... The Merge method of // perk/quantile is actually not working as advertised - and it might be // unfixable, as the underlying algorithm is apparently not capable of merging @@ -178,11 +189,11 @@ func NewSummary(opts SummaryOpts) Summary { func newSummary(desc *Desc, opts SummaryOpts, labelValues ...string) Summary { if len(desc.variableLabels) != len(labelValues) { - panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels, labelValues)) + panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels.labelNames(), labelValues)) } for _, n := range desc.variableLabels { - if n == quantileLabel { + if n.Name == quantileLabel { panic(errQuantileLabelNotAllowed) } } @@ -530,20 +541,28 @@ type SummaryVec struct { // it is handled by the Prometheus server internally, “quantile” is an illegal // label name. NewSummaryVec will panic if this label name is used. func NewSummaryVec(opts SummaryOpts, labelNames []string) *SummaryVec { - for _, ln := range labelNames { + return V2.NewSummaryVec(SummaryVecOpts{ + SummaryOpts: opts, + VariableLabels: UnconstrainedLabels(labelNames), + }) +} + +// NewSummaryVec creates a new SummaryVec based on the provided SummaryVecOpts. 
+func (v2) NewSummaryVec(opts SummaryVecOpts) *SummaryVec { + for _, ln := range opts.VariableLabels.labelNames() { if ln == quantileLabel { panic(errQuantileLabelNotAllowed) } } - desc := NewDesc( + desc := V2.NewDesc( BuildFQName(opts.Namespace, opts.Subsystem, opts.Name), opts.Help, - labelNames, + opts.VariableLabels, opts.ConstLabels, ) return &SummaryVec{ MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { - return newSummary(desc, opts, lvs...) + return newSummary(desc, opts.SummaryOpts, lvs...) }), } } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/timer.go b/vendor/github.com/prometheus/client_golang/prometheus/timer.go index f28a76f3..52344fef 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/timer.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/timer.go @@ -23,7 +23,9 @@ type Timer struct { } // NewTimer creates a new Timer. The provided Observer is used to observe a -// duration in seconds. Timer is usually used to time a function call in the +// duration in seconds. If the Observer implements ExemplarObserver, passing exemplar +// later on will be also supported. +// Timer is usually used to time a function call in the // following way: // // func TimeMe() { @@ -31,6 +33,14 @@ type Timer struct { // defer timer.ObserveDuration() // // Do actual work. // } +// +// or +// +// func TimeMeWithExemplar() { +// timer := NewTimer(myHistogram) +// defer timer.ObserveDurationWithExemplar(exemplar) +// // Do actual work. +// } func NewTimer(o Observer) *Timer { return &Timer{ begin: time.Now(), @@ -53,3 +63,19 @@ func (t *Timer) ObserveDuration() time.Duration { } return d } + +// ObserveDurationWithExemplar is like ObserveDuration, but it will also +// observe exemplar with the duration unless exemplar is nil or provided Observer can't +// be casted to ExemplarObserver. +func (t *Timer) ObserveDurationWithExemplar(exemplar Labels) time.Duration { + d := time.Since(t.begin) + eo, ok := t.observer.(ExemplarObserver) + if ok && exemplar != nil { + eo.ObserveWithExemplar(d.Seconds(), exemplar) + return d + } + if t.observer != nil { + t.observer.Observe(d.Seconds()) + } + return d +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/value.go b/vendor/github.com/prometheus/client_golang/prometheus/value.go index 2d3abc1c..5f6bb800 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/value.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/value.go @@ -19,13 +19,11 @@ import ( "time" "unicode/utf8" - //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. - "github.com/golang/protobuf/proto" - "google.golang.org/protobuf/types/known/timestamppb" - "github.com/prometheus/client_golang/prometheus/internal" dto "github.com/prometheus/client_model/go" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" ) // ValueType is an enumeration of metric types that represent a simple value. 
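The timer.go hunk above adds Timer.ObserveDurationWithExemplar, which attaches an exemplar when the wrapped Observer supports it. A short usage sketch, not part of the patch; the histogram, its name, and the "trace_id" exemplar label are illustrative:

package example

import "github.com/prometheus/client_golang/prometheus"

var requestDuration = prometheus.NewHistogram(prometheus.HistogramOpts{
	Name:    "request_duration_seconds",
	Help:    "Time spent handling a request.",
	Buckets: prometheus.DefBuckets,
})

func handle(traceID string) {
	timer := prometheus.NewTimer(requestDuration)
	// Histograms implement ExemplarObserver, so the exemplar is recorded;
	// for observers that do not, this degrades to a plain Observe call.
	defer timer.ObserveDurationWithExemplar(prometheus.Labels{"trace_id": traceID})

	// ... actual work being timed ...
}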
@@ -188,9 +186,9 @@ func MakeLabelPairs(desc *Desc, labelValues []string) []*dto.LabelPair { return desc.constLabelPairs } labelPairs := make([]*dto.LabelPair, 0, totalLen) - for i, n := range desc.variableLabels { + for i, l := range desc.variableLabels { labelPairs = append(labelPairs, &dto.LabelPair{ - Name: proto.String(n), + Name: proto.String(l.Name), Value: proto.String(labelValues[i]), }) } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/vec.go b/vendor/github.com/prometheus/client_golang/prometheus/vec.go index 7ae32259..f0d0015a 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/vec.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/vec.go @@ -20,6 +20,24 @@ import ( "github.com/prometheus/common/model" ) +var labelsPool = &sync.Pool{ + New: func() interface{} { + return make(Labels) + }, +} + +func getLabelsFromPool() Labels { + return labelsPool.Get().(Labels) +} + +func putLabelsToPool(labels Labels) { + for k := range labels { + delete(labels, k) + } + + labelsPool.Put(labels) +} + // MetricVec is a Collector to bundle metrics of the same name that differ in // their label values. MetricVec is not used directly but as a building block // for implementations of vectors of a given metric type, like GaugeVec, @@ -72,6 +90,7 @@ func NewMetricVec(desc *Desc, newMetric func(lvs ...string) Metric) *MetricVec { // with a performance overhead (for creating and processing the Labels map). // See also the CounterVec example. func (m *MetricVec) DeleteLabelValues(lvs ...string) bool { + lvs = constrainLabelValues(m.desc, lvs, m.curry) h, err := m.hashLabelValues(lvs) if err != nil { return false @@ -91,6 +110,9 @@ func (m *MetricVec) DeleteLabelValues(lvs ...string) bool { // This method is used for the same purpose as DeleteLabelValues(...string). See // there for pros and cons of the two methods. func (m *MetricVec) Delete(labels Labels) bool { + labels = constrainLabels(m.desc, labels) + defer putLabelsToPool(labels) + h, err := m.hashLabels(labels) if err != nil { return false @@ -106,6 +128,9 @@ func (m *MetricVec) Delete(labels Labels) bool { // Note that curried labels will never be matched if deleting from the curried vector. // To match curried labels with DeletePartialMatch, it must be called on the base vector. func (m *MetricVec) DeletePartialMatch(labels Labels) int { + labels = constrainLabels(m.desc, labels) + defer putLabelsToPool(labels) + return m.metricMap.deleteByLabels(labels, m.curry) } @@ -145,10 +170,10 @@ func (m *MetricVec) CurryWith(labels Labels) (*MetricVec, error) { iCurry int ) for i, label := range m.desc.variableLabels { - val, ok := labels[label] + val, ok := labels[label.Name] if iCurry < len(oldCurry) && oldCurry[iCurry].index == i { if ok { - return nil, fmt.Errorf("label name %q is already curried", label) + return nil, fmt.Errorf("label name %q is already curried", label.Name) } newCurry = append(newCurry, oldCurry[iCurry]) iCurry++ @@ -156,7 +181,7 @@ func (m *MetricVec) CurryWith(labels Labels) (*MetricVec, error) { if !ok { continue // Label stays uncurried. } - newCurry = append(newCurry, curriedLabelValue{i, val}) + newCurry = append(newCurry, curriedLabelValue{i, label.Constrain(val)}) } } if l := len(oldCurry) + len(labels) - len(newCurry); l > 0 { @@ -199,6 +224,7 @@ func (m *MetricVec) CurryWith(labels Labels) (*MetricVec, error) { // a wrapper around MetricVec, implementing a vector for a specific Metric // implementation, for example GaugeVec. 
func (m *MetricVec) GetMetricWithLabelValues(lvs ...string) (Metric, error) { + lvs = constrainLabelValues(m.desc, lvs, m.curry) h, err := m.hashLabelValues(lvs) if err != nil { return nil, err @@ -224,6 +250,9 @@ func (m *MetricVec) GetMetricWithLabelValues(lvs ...string) (Metric, error) { // around MetricVec, implementing a vector for a specific Metric implementation, // for example GaugeVec. func (m *MetricVec) GetMetricWith(labels Labels) (Metric, error) { + labels = constrainLabels(m.desc, labels) + defer putLabelsToPool(labels) + h, err := m.hashLabels(labels) if err != nil { return nil, err @@ -266,16 +295,16 @@ func (m *MetricVec) hashLabels(labels Labels) (uint64, error) { iCurry int ) for i, label := range m.desc.variableLabels { - val, ok := labels[label] + val, ok := labels[label.Name] if iCurry < len(curry) && curry[iCurry].index == i { if ok { - return 0, fmt.Errorf("label name %q is already curried", label) + return 0, fmt.Errorf("label name %q is already curried", label.Name) } h = m.hashAdd(h, curry[iCurry].value) iCurry++ } else { if !ok { - return 0, fmt.Errorf("label name %q missing in label map", label) + return 0, fmt.Errorf("label name %q missing in label map", label.Name) } h = m.hashAdd(h, val) } @@ -453,7 +482,7 @@ func valueMatchesVariableOrCurriedValue(targetValue string, index int, values [] func matchPartialLabels(desc *Desc, values []string, labels Labels, curry []curriedLabelValue) bool { for l, v := range labels { // Check if the target label exists in our metrics and get the index. - varLabelIndex, validLabel := indexOf(l, desc.variableLabels) + varLabelIndex, validLabel := indexOf(l, desc.variableLabels.labelNames()) if validLabel { // Check the value of that label against the target value. // We don't consider curried values in partial matches. 
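The vec.go changes above (constrainLabelValues, the per-label Constrain calls, and label.Name lookups) are the plumbing behind the constrained-label API surfaced earlier via SummaryVecOpts and the V2 constructors. A sketch of how that constructor could be used, not part of the patch; the metric name, label names, and strings.ToLower normalizer are illustrative, and the Constraint field name is an assumption taken from the vendored doc comments:

package example

import (
	"strings"

	"github.com/prometheus/client_golang/prometheus"
)

var apiLatency = prometheus.V2.NewSummaryVec(prometheus.SummaryVecOpts{
	SummaryOpts: prometheus.SummaryOpts{
		Name: "api_request_duration_seconds",
		Help: "Request latency by method and status code.",
	},
	VariableLabels: prometheus.ConstrainedLabels{
		{Name: "method", Constraint: strings.ToLower}, // "GET" and "get" collapse into one series
		{Name: "code"},                                // left unconstrained
	},
})

func record(method, code string, seconds float64) {
	// Label values pass through their Constrain function before the series
	// is hashed and looked up, as shown in the vec.go hunks above.
	apiLatency.WithLabelValues(method, code).Observe(seconds)
}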
@@ -605,7 +634,7 @@ func matchLabels(desc *Desc, values []string, labels Labels, curry []curriedLabe iCurry++ continue } - if values[i] != labels[k] { + if values[i] != labels[k.Name] { return false } } @@ -621,7 +650,7 @@ func extractLabelValues(desc *Desc, labels Labels, curry []curriedLabelValue) [] iCurry++ continue } - labelValues[i] = labels[k] + labelValues[i] = labels[k.Name] } return labelValues } @@ -640,3 +669,35 @@ func inlineLabelValues(lvs []string, curry []curriedLabelValue) []string { } return labelValues } + +func constrainLabels(desc *Desc, labels Labels) Labels { + constrainedLabels := getLabelsFromPool() + for l, v := range labels { + if i, ok := indexOf(l, desc.variableLabels.labelNames()); ok { + v = desc.variableLabels[i].Constrain(v) + } + + constrainedLabels[l] = v + } + + return constrainedLabels +} + +func constrainLabelValues(desc *Desc, lvs []string, curry []curriedLabelValue) []string { + constrainedValues := make([]string, len(lvs)) + var iCurry, iLVs int + for i := 0; i < len(lvs)+len(curry); i++ { + if iCurry < len(curry) && curry[iCurry].index == i { + iCurry++ + continue + } + + if i < len(desc.variableLabels) { + constrainedValues[iLVs] = desc.variableLabels[i].Constrain(lvs[iLVs]) + } else { + constrainedValues[iLVs] = lvs[iLVs] + } + iLVs++ + } + return constrainedValues +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/vnext.go b/vendor/github.com/prometheus/client_golang/prometheus/vnext.go new file mode 100644 index 00000000..42bc3a8f --- /dev/null +++ b/vendor/github.com/prometheus/client_golang/prometheus/vnext.go @@ -0,0 +1,23 @@ +// Copyright 2022 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package prometheus + +type v2 struct{} + +// V2 is a struct that can be referenced to access experimental API that might +// be present in v2 of client golang someday. It offers extended functionality +// of v1 with slightly changed API. It is acceptable to use some pieces from v1 +// and e.g `prometheus.NewGauge` and some from v2 e.g. `prometheus.V2.NewDesc` +// in the same codebase. +var V2 = v2{} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go b/vendor/github.com/prometheus/client_golang/prometheus/wrap.go index 1498ee14..25da157f 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/wrap.go @@ -17,12 +17,10 @@ import ( "fmt" "sort" - //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. - "github.com/golang/protobuf/proto" + "github.com/prometheus/client_golang/prometheus/internal" dto "github.com/prometheus/client_model/go" - - "github.com/prometheus/client_golang/prometheus/internal" + "google.golang.org/protobuf/proto" ) // WrapRegistererWith returns a Registerer wrapping the provided @@ -206,7 +204,7 @@ func wrapDesc(desc *Desc, prefix string, labels Labels) *Desc { constLabels[ln] = lv } // NewDesc will do remaining validations. 
- newDesc := NewDesc(prefix+desc.fqName, desc.help, desc.variableLabels, constLabels) + newDesc := V2.NewDesc(prefix+desc.fqName, desc.help, desc.variableLabels, constLabels) // Propagate errors if there was any. This will override any errer // created by NewDesc above, i.e. earlier errors get precedence. if desc.err != nil { diff --git a/vendor/github.com/prometheus/client_model/go/metrics.pb.go b/vendor/github.com/prometheus/client_model/go/metrics.pb.go index 35904ea1..2b5bca4b 100644 --- a/vendor/github.com/prometheus/client_model/go/metrics.pb.go +++ b/vendor/github.com/prometheus/client_model/go/metrics.pb.go @@ -1,25 +1,38 @@ +// Copyright 2013 Prometheus Team +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + // Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.30.0 +// protoc v3.20.3 // source: io/prometheus/client/metrics.proto package io_prometheus_client import ( - fmt "fmt" - proto "github.com/golang/protobuf/proto" - timestamp "github.com/golang/protobuf/ptypes/timestamp" - math "math" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" + sync "sync" ) -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) type MetricType int32 @@ -38,23 +51,25 @@ const ( MetricType_GAUGE_HISTOGRAM MetricType = 5 ) -var MetricType_name = map[int32]string{ - 0: "COUNTER", - 1: "GAUGE", - 2: "SUMMARY", - 3: "UNTYPED", - 4: "HISTOGRAM", - 5: "GAUGE_HISTOGRAM", -} - -var MetricType_value = map[string]int32{ - "COUNTER": 0, - "GAUGE": 1, - "SUMMARY": 2, - "UNTYPED": 3, - "HISTOGRAM": 4, - "GAUGE_HISTOGRAM": 5, -} +// Enum value maps for MetricType. 
+var ( + MetricType_name = map[int32]string{ + 0: "COUNTER", + 1: "GAUGE", + 2: "SUMMARY", + 3: "UNTYPED", + 4: "HISTOGRAM", + 5: "GAUGE_HISTOGRAM", + } + MetricType_value = map[string]int32{ + "COUNTER": 0, + "GAUGE": 1, + "SUMMARY": 2, + "UNTYPED": 3, + "HISTOGRAM": 4, + "GAUGE_HISTOGRAM": 5, + } +) func (x MetricType) Enum() *MetricType { p := new(MetricType) @@ -63,449 +78,519 @@ func (x MetricType) Enum() *MetricType { } func (x MetricType) String() string { - return proto.EnumName(MetricType_name, int32(x)) + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } -func (x *MetricType) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(MetricType_value, data, "MetricType") +func (MetricType) Descriptor() protoreflect.EnumDescriptor { + return file_io_prometheus_client_metrics_proto_enumTypes[0].Descriptor() +} + +func (MetricType) Type() protoreflect.EnumType { + return &file_io_prometheus_client_metrics_proto_enumTypes[0] +} + +func (x MetricType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. +func (x *MetricType) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) if err != nil { return err } - *x = MetricType(value) + *x = MetricType(num) return nil } +// Deprecated: Use MetricType.Descriptor instead. func (MetricType) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{0} + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{0} } type LabelPair struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Value *string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *LabelPair) Reset() { *m = LabelPair{} } -func (m *LabelPair) String() string { return proto.CompactTextString(m) } -func (*LabelPair) ProtoMessage() {} -func (*LabelPair) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{0} + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Value *string `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` } -func (m *LabelPair) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_LabelPair.Unmarshal(m, b) -} -func (m *LabelPair) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_LabelPair.Marshal(b, m, deterministic) -} -func (m *LabelPair) XXX_Merge(src proto.Message) { - xxx_messageInfo_LabelPair.Merge(m, src) +func (x *LabelPair) Reset() { + *x = LabelPair{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *LabelPair) XXX_Size() int { - return xxx_messageInfo_LabelPair.Size(m) + +func (x *LabelPair) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *LabelPair) XXX_DiscardUnknown() { - xxx_messageInfo_LabelPair.DiscardUnknown(m) + +func (*LabelPair) ProtoMessage() {} + +func (x *LabelPair) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return 
mi.MessageOf(x) } -var xxx_messageInfo_LabelPair proto.InternalMessageInfo +// Deprecated: Use LabelPair.ProtoReflect.Descriptor instead. +func (*LabelPair) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{0} +} -func (m *LabelPair) GetName() string { - if m != nil && m.Name != nil { - return *m.Name +func (x *LabelPair) GetName() string { + if x != nil && x.Name != nil { + return *x.Name } return "" } -func (m *LabelPair) GetValue() string { - if m != nil && m.Value != nil { - return *m.Value +func (x *LabelPair) GetValue() string { + if x != nil && x.Value != nil { + return *x.Value } return "" } type Gauge struct { - Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *Gauge) Reset() { *m = Gauge{} } -func (m *Gauge) String() string { return proto.CompactTextString(m) } -func (*Gauge) ProtoMessage() {} -func (*Gauge) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{1} + Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` } -func (m *Gauge) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Gauge.Unmarshal(m, b) -} -func (m *Gauge) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Gauge.Marshal(b, m, deterministic) -} -func (m *Gauge) XXX_Merge(src proto.Message) { - xxx_messageInfo_Gauge.Merge(m, src) +func (x *Gauge) Reset() { + *x = Gauge{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Gauge) XXX_Size() int { - return xxx_messageInfo_Gauge.Size(m) + +func (x *Gauge) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Gauge) XXX_DiscardUnknown() { - xxx_messageInfo_Gauge.DiscardUnknown(m) + +func (*Gauge) ProtoMessage() {} + +func (x *Gauge) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Gauge proto.InternalMessageInfo +// Deprecated: Use Gauge.ProtoReflect.Descriptor instead. 
+func (*Gauge) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{1} +} -func (m *Gauge) GetValue() float64 { - if m != nil && m.Value != nil { - return *m.Value +func (x *Gauge) GetValue() float64 { + if x != nil && x.Value != nil { + return *x.Value } return 0 } type Counter struct { - Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` - Exemplar *Exemplar `protobuf:"bytes,2,opt,name=exemplar" json:"exemplar,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *Counter) Reset() { *m = Counter{} } -func (m *Counter) String() string { return proto.CompactTextString(m) } -func (*Counter) ProtoMessage() {} -func (*Counter) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{2} + Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` + Exemplar *Exemplar `protobuf:"bytes,2,opt,name=exemplar" json:"exemplar,omitempty"` } -func (m *Counter) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Counter.Unmarshal(m, b) -} -func (m *Counter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Counter.Marshal(b, m, deterministic) -} -func (m *Counter) XXX_Merge(src proto.Message) { - xxx_messageInfo_Counter.Merge(m, src) +func (x *Counter) Reset() { + *x = Counter{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Counter) XXX_Size() int { - return xxx_messageInfo_Counter.Size(m) + +func (x *Counter) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Counter) XXX_DiscardUnknown() { - xxx_messageInfo_Counter.DiscardUnknown(m) + +func (*Counter) ProtoMessage() {} + +func (x *Counter) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Counter proto.InternalMessageInfo +// Deprecated: Use Counter.ProtoReflect.Descriptor instead. 
+func (*Counter) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{2} +} -func (m *Counter) GetValue() float64 { - if m != nil && m.Value != nil { - return *m.Value +func (x *Counter) GetValue() float64 { + if x != nil && x.Value != nil { + return *x.Value } return 0 } -func (m *Counter) GetExemplar() *Exemplar { - if m != nil { - return m.Exemplar +func (x *Counter) GetExemplar() *Exemplar { + if x != nil { + return x.Exemplar } return nil } type Quantile struct { - Quantile *float64 `protobuf:"fixed64,1,opt,name=quantile" json:"quantile,omitempty"` - Value *float64 `protobuf:"fixed64,2,opt,name=value" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *Quantile) Reset() { *m = Quantile{} } -func (m *Quantile) String() string { return proto.CompactTextString(m) } -func (*Quantile) ProtoMessage() {} -func (*Quantile) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{3} + Quantile *float64 `protobuf:"fixed64,1,opt,name=quantile" json:"quantile,omitempty"` + Value *float64 `protobuf:"fixed64,2,opt,name=value" json:"value,omitempty"` } -func (m *Quantile) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Quantile.Unmarshal(m, b) -} -func (m *Quantile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Quantile.Marshal(b, m, deterministic) -} -func (m *Quantile) XXX_Merge(src proto.Message) { - xxx_messageInfo_Quantile.Merge(m, src) +func (x *Quantile) Reset() { + *x = Quantile{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Quantile) XXX_Size() int { - return xxx_messageInfo_Quantile.Size(m) + +func (x *Quantile) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Quantile) XXX_DiscardUnknown() { - xxx_messageInfo_Quantile.DiscardUnknown(m) + +func (*Quantile) ProtoMessage() {} + +func (x *Quantile) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Quantile proto.InternalMessageInfo +// Deprecated: Use Quantile.ProtoReflect.Descriptor instead. 
+func (*Quantile) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{3} +} -func (m *Quantile) GetQuantile() float64 { - if m != nil && m.Quantile != nil { - return *m.Quantile +func (x *Quantile) GetQuantile() float64 { + if x != nil && x.Quantile != nil { + return *x.Quantile } return 0 } -func (m *Quantile) GetValue() float64 { - if m != nil && m.Value != nil { - return *m.Value +func (x *Quantile) GetValue() float64 { + if x != nil && x.Value != nil { + return *x.Value } return 0 } type Summary struct { - SampleCount *uint64 `protobuf:"varint,1,opt,name=sample_count,json=sampleCount" json:"sample_count,omitempty"` - SampleSum *float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum" json:"sample_sum,omitempty"` - Quantile []*Quantile `protobuf:"bytes,3,rep,name=quantile" json:"quantile,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *Summary) Reset() { *m = Summary{} } -func (m *Summary) String() string { return proto.CompactTextString(m) } -func (*Summary) ProtoMessage() {} -func (*Summary) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{4} + SampleCount *uint64 `protobuf:"varint,1,opt,name=sample_count,json=sampleCount" json:"sample_count,omitempty"` + SampleSum *float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum" json:"sample_sum,omitempty"` + Quantile []*Quantile `protobuf:"bytes,3,rep,name=quantile" json:"quantile,omitempty"` } -func (m *Summary) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Summary.Unmarshal(m, b) -} -func (m *Summary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Summary.Marshal(b, m, deterministic) -} -func (m *Summary) XXX_Merge(src proto.Message) { - xxx_messageInfo_Summary.Merge(m, src) +func (x *Summary) Reset() { + *x = Summary{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Summary) XXX_Size() int { - return xxx_messageInfo_Summary.Size(m) + +func (x *Summary) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Summary) XXX_DiscardUnknown() { - xxx_messageInfo_Summary.DiscardUnknown(m) + +func (*Summary) ProtoMessage() {} + +func (x *Summary) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Summary proto.InternalMessageInfo +// Deprecated: Use Summary.ProtoReflect.Descriptor instead. 
+func (*Summary) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{4} +} -func (m *Summary) GetSampleCount() uint64 { - if m != nil && m.SampleCount != nil { - return *m.SampleCount +func (x *Summary) GetSampleCount() uint64 { + if x != nil && x.SampleCount != nil { + return *x.SampleCount } return 0 } -func (m *Summary) GetSampleSum() float64 { - if m != nil && m.SampleSum != nil { - return *m.SampleSum +func (x *Summary) GetSampleSum() float64 { + if x != nil && x.SampleSum != nil { + return *x.SampleSum } return 0 } -func (m *Summary) GetQuantile() []*Quantile { - if m != nil { - return m.Quantile +func (x *Summary) GetQuantile() []*Quantile { + if x != nil { + return x.Quantile } return nil } type Untyped struct { - Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *Untyped) Reset() { *m = Untyped{} } -func (m *Untyped) String() string { return proto.CompactTextString(m) } -func (*Untyped) ProtoMessage() {} -func (*Untyped) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{5} + Value *float64 `protobuf:"fixed64,1,opt,name=value" json:"value,omitempty"` } -func (m *Untyped) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Untyped.Unmarshal(m, b) -} -func (m *Untyped) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Untyped.Marshal(b, m, deterministic) -} -func (m *Untyped) XXX_Merge(src proto.Message) { - xxx_messageInfo_Untyped.Merge(m, src) +func (x *Untyped) Reset() { + *x = Untyped{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Untyped) XXX_Size() int { - return xxx_messageInfo_Untyped.Size(m) + +func (x *Untyped) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Untyped) XXX_DiscardUnknown() { - xxx_messageInfo_Untyped.DiscardUnknown(m) + +func (*Untyped) ProtoMessage() {} + +func (x *Untyped) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Untyped proto.InternalMessageInfo +// Deprecated: Use Untyped.ProtoReflect.Descriptor instead. 
+func (*Untyped) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{5} +} -func (m *Untyped) GetValue() float64 { - if m != nil && m.Value != nil { - return *m.Value +func (x *Untyped) GetValue() float64 { + if x != nil && x.Value != nil { + return *x.Value } return 0 } type Histogram struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + SampleCount *uint64 `protobuf:"varint,1,opt,name=sample_count,json=sampleCount" json:"sample_count,omitempty"` - SampleCountFloat *float64 `protobuf:"fixed64,4,opt,name=sample_count_float,json=sampleCountFloat" json:"sample_count_float,omitempty"` + SampleCountFloat *float64 `protobuf:"fixed64,4,opt,name=sample_count_float,json=sampleCountFloat" json:"sample_count_float,omitempty"` // Overrides sample_count if > 0. SampleSum *float64 `protobuf:"fixed64,2,opt,name=sample_sum,json=sampleSum" json:"sample_sum,omitempty"` // Buckets for the conventional histogram. - Bucket []*Bucket `protobuf:"bytes,3,rep,name=bucket" json:"bucket,omitempty"` + Bucket []*Bucket `protobuf:"bytes,3,rep,name=bucket" json:"bucket,omitempty"` // Ordered in increasing order of upper_bound, +Inf bucket is optional. // schema defines the bucket schema. Currently, valid numbers are -4 <= n <= 8. // They are all for base-2 bucket schemas, where 1 is a bucket boundary in each case, and // then each power of two is divided into 2^n logarithmic buckets. // Or in other words, each bucket boundary is the previous boundary times 2^(2^-n). // In the future, more bucket schemas may be added using numbers < -4 or > 8. Schema *int32 `protobuf:"zigzag32,5,opt,name=schema" json:"schema,omitempty"` - ZeroThreshold *float64 `protobuf:"fixed64,6,opt,name=zero_threshold,json=zeroThreshold" json:"zero_threshold,omitempty"` - ZeroCount *uint64 `protobuf:"varint,7,opt,name=zero_count,json=zeroCount" json:"zero_count,omitempty"` - ZeroCountFloat *float64 `protobuf:"fixed64,8,opt,name=zero_count_float,json=zeroCountFloat" json:"zero_count_float,omitempty"` + ZeroThreshold *float64 `protobuf:"fixed64,6,opt,name=zero_threshold,json=zeroThreshold" json:"zero_threshold,omitempty"` // Breadth of the zero bucket. + ZeroCount *uint64 `protobuf:"varint,7,opt,name=zero_count,json=zeroCount" json:"zero_count,omitempty"` // Count in zero bucket. + ZeroCountFloat *float64 `protobuf:"fixed64,8,opt,name=zero_count_float,json=zeroCountFloat" json:"zero_count_float,omitempty"` // Overrides sb_zero_count if > 0. // Negative buckets for the native histogram. NegativeSpan []*BucketSpan `protobuf:"bytes,9,rep,name=negative_span,json=negativeSpan" json:"negative_span,omitempty"` // Use either "negative_delta" or "negative_count", the former for // regular histograms with integer counts, the latter for float // histograms. - NegativeDelta []int64 `protobuf:"zigzag64,10,rep,name=negative_delta,json=negativeDelta" json:"negative_delta,omitempty"` - NegativeCount []float64 `protobuf:"fixed64,11,rep,name=negative_count,json=negativeCount" json:"negative_count,omitempty"` + NegativeDelta []int64 `protobuf:"zigzag64,10,rep,name=negative_delta,json=negativeDelta" json:"negative_delta,omitempty"` // Count delta of each bucket compared to previous one (or to zero for 1st bucket). + NegativeCount []float64 `protobuf:"fixed64,11,rep,name=negative_count,json=negativeCount" json:"negative_count,omitempty"` // Absolute count of each bucket. // Positive buckets for the native histogram. 
PositiveSpan []*BucketSpan `protobuf:"bytes,12,rep,name=positive_span,json=positiveSpan" json:"positive_span,omitempty"` // Use either "positive_delta" or "positive_count", the former for // regular histograms with integer counts, the latter for float // histograms. - PositiveDelta []int64 `protobuf:"zigzag64,13,rep,name=positive_delta,json=positiveDelta" json:"positive_delta,omitempty"` - PositiveCount []float64 `protobuf:"fixed64,14,rep,name=positive_count,json=positiveCount" json:"positive_count,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + PositiveDelta []int64 `protobuf:"zigzag64,13,rep,name=positive_delta,json=positiveDelta" json:"positive_delta,omitempty"` // Count delta of each bucket compared to previous one (or to zero for 1st bucket). + PositiveCount []float64 `protobuf:"fixed64,14,rep,name=positive_count,json=positiveCount" json:"positive_count,omitempty"` // Absolute count of each bucket. } -func (m *Histogram) Reset() { *m = Histogram{} } -func (m *Histogram) String() string { return proto.CompactTextString(m) } -func (*Histogram) ProtoMessage() {} -func (*Histogram) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{6} +func (x *Histogram) Reset() { + *x = Histogram{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Histogram) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Histogram.Unmarshal(m, b) +func (x *Histogram) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Histogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Histogram.Marshal(b, m, deterministic) -} -func (m *Histogram) XXX_Merge(src proto.Message) { - xxx_messageInfo_Histogram.Merge(m, src) -} -func (m *Histogram) XXX_Size() int { - return xxx_messageInfo_Histogram.Size(m) -} -func (m *Histogram) XXX_DiscardUnknown() { - xxx_messageInfo_Histogram.DiscardUnknown(m) + +func (*Histogram) ProtoMessage() {} + +func (x *Histogram) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Histogram proto.InternalMessageInfo +// Deprecated: Use Histogram.ProtoReflect.Descriptor instead. 
+func (*Histogram) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{6} +} -func (m *Histogram) GetSampleCount() uint64 { - if m != nil && m.SampleCount != nil { - return *m.SampleCount +func (x *Histogram) GetSampleCount() uint64 { + if x != nil && x.SampleCount != nil { + return *x.SampleCount } return 0 } -func (m *Histogram) GetSampleCountFloat() float64 { - if m != nil && m.SampleCountFloat != nil { - return *m.SampleCountFloat +func (x *Histogram) GetSampleCountFloat() float64 { + if x != nil && x.SampleCountFloat != nil { + return *x.SampleCountFloat } return 0 } -func (m *Histogram) GetSampleSum() float64 { - if m != nil && m.SampleSum != nil { - return *m.SampleSum +func (x *Histogram) GetSampleSum() float64 { + if x != nil && x.SampleSum != nil { + return *x.SampleSum } return 0 } -func (m *Histogram) GetBucket() []*Bucket { - if m != nil { - return m.Bucket +func (x *Histogram) GetBucket() []*Bucket { + if x != nil { + return x.Bucket } return nil } -func (m *Histogram) GetSchema() int32 { - if m != nil && m.Schema != nil { - return *m.Schema +func (x *Histogram) GetSchema() int32 { + if x != nil && x.Schema != nil { + return *x.Schema } return 0 } -func (m *Histogram) GetZeroThreshold() float64 { - if m != nil && m.ZeroThreshold != nil { - return *m.ZeroThreshold +func (x *Histogram) GetZeroThreshold() float64 { + if x != nil && x.ZeroThreshold != nil { + return *x.ZeroThreshold } return 0 } -func (m *Histogram) GetZeroCount() uint64 { - if m != nil && m.ZeroCount != nil { - return *m.ZeroCount +func (x *Histogram) GetZeroCount() uint64 { + if x != nil && x.ZeroCount != nil { + return *x.ZeroCount } return 0 } -func (m *Histogram) GetZeroCountFloat() float64 { - if m != nil && m.ZeroCountFloat != nil { - return *m.ZeroCountFloat +func (x *Histogram) GetZeroCountFloat() float64 { + if x != nil && x.ZeroCountFloat != nil { + return *x.ZeroCountFloat } return 0 } -func (m *Histogram) GetNegativeSpan() []*BucketSpan { - if m != nil { - return m.NegativeSpan +func (x *Histogram) GetNegativeSpan() []*BucketSpan { + if x != nil { + return x.NegativeSpan } return nil } -func (m *Histogram) GetNegativeDelta() []int64 { - if m != nil { - return m.NegativeDelta +func (x *Histogram) GetNegativeDelta() []int64 { + if x != nil { + return x.NegativeDelta } return nil } -func (m *Histogram) GetNegativeCount() []float64 { - if m != nil { - return m.NegativeCount +func (x *Histogram) GetNegativeCount() []float64 { + if x != nil { + return x.NegativeCount } return nil } -func (m *Histogram) GetPositiveSpan() []*BucketSpan { - if m != nil { - return m.PositiveSpan +func (x *Histogram) GetPositiveSpan() []*BucketSpan { + if x != nil { + return x.PositiveSpan } return nil } -func (m *Histogram) GetPositiveDelta() []int64 { - if m != nil { - return m.PositiveDelta +func (x *Histogram) GetPositiveDelta() []int64 { + if x != nil { + return x.PositiveDelta } return nil } -func (m *Histogram) GetPositiveCount() []float64 { - if m != nil { - return m.PositiveCount +func (x *Histogram) GetPositiveCount() []float64 { + if x != nil { + return x.PositiveCount } return nil } @@ -513,64 +598,72 @@ func (m *Histogram) GetPositiveCount() []float64 { // A Bucket of a conventional histogram, each of which is treated as // an individual counter-like time series by Prometheus. 
type Bucket struct { - CumulativeCount *uint64 `protobuf:"varint,1,opt,name=cumulative_count,json=cumulativeCount" json:"cumulative_count,omitempty"` - CumulativeCountFloat *float64 `protobuf:"fixed64,4,opt,name=cumulative_count_float,json=cumulativeCountFloat" json:"cumulative_count_float,omitempty"` - UpperBound *float64 `protobuf:"fixed64,2,opt,name=upper_bound,json=upperBound" json:"upper_bound,omitempty"` + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + CumulativeCount *uint64 `protobuf:"varint,1,opt,name=cumulative_count,json=cumulativeCount" json:"cumulative_count,omitempty"` // Cumulative in increasing order. + CumulativeCountFloat *float64 `protobuf:"fixed64,4,opt,name=cumulative_count_float,json=cumulativeCountFloat" json:"cumulative_count_float,omitempty"` // Overrides cumulative_count if > 0. + UpperBound *float64 `protobuf:"fixed64,2,opt,name=upper_bound,json=upperBound" json:"upper_bound,omitempty"` // Inclusive. Exemplar *Exemplar `protobuf:"bytes,3,opt,name=exemplar" json:"exemplar,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` } -func (m *Bucket) Reset() { *m = Bucket{} } -func (m *Bucket) String() string { return proto.CompactTextString(m) } -func (*Bucket) ProtoMessage() {} -func (*Bucket) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{7} +func (x *Bucket) Reset() { + *x = Bucket{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Bucket) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Bucket.Unmarshal(m, b) -} -func (m *Bucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Bucket.Marshal(b, m, deterministic) -} -func (m *Bucket) XXX_Merge(src proto.Message) { - xxx_messageInfo_Bucket.Merge(m, src) -} -func (m *Bucket) XXX_Size() int { - return xxx_messageInfo_Bucket.Size(m) +func (x *Bucket) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Bucket) XXX_DiscardUnknown() { - xxx_messageInfo_Bucket.DiscardUnknown(m) + +func (*Bucket) ProtoMessage() {} + +func (x *Bucket) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Bucket proto.InternalMessageInfo +// Deprecated: Use Bucket.ProtoReflect.Descriptor instead. 
+func (*Bucket) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{7} +} -func (m *Bucket) GetCumulativeCount() uint64 { - if m != nil && m.CumulativeCount != nil { - return *m.CumulativeCount +func (x *Bucket) GetCumulativeCount() uint64 { + if x != nil && x.CumulativeCount != nil { + return *x.CumulativeCount } return 0 } -func (m *Bucket) GetCumulativeCountFloat() float64 { - if m != nil && m.CumulativeCountFloat != nil { - return *m.CumulativeCountFloat +func (x *Bucket) GetCumulativeCountFloat() float64 { + if x != nil && x.CumulativeCountFloat != nil { + return *x.CumulativeCountFloat } return 0 } -func (m *Bucket) GetUpperBound() float64 { - if m != nil && m.UpperBound != nil { - return *m.UpperBound +func (x *Bucket) GetUpperBound() float64 { + if x != nil && x.UpperBound != nil { + return *x.UpperBound } return 0 } -func (m *Bucket) GetExemplar() *Exemplar { - if m != nil { - return m.Exemplar +func (x *Bucket) GetExemplar() *Exemplar { + if x != nil { + return x.Exemplar } return nil } @@ -582,333 +675,658 @@ func (m *Bucket) GetExemplar() *Exemplar { // structured here (with all the buckets in a single array separate // from the Spans). type BucketSpan struct { - Offset *int32 `protobuf:"zigzag32,1,opt,name=offset" json:"offset,omitempty"` - Length *uint32 `protobuf:"varint,2,opt,name=length" json:"length,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *BucketSpan) Reset() { *m = BucketSpan{} } -func (m *BucketSpan) String() string { return proto.CompactTextString(m) } -func (*BucketSpan) ProtoMessage() {} -func (*BucketSpan) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{8} + Offset *int32 `protobuf:"zigzag32,1,opt,name=offset" json:"offset,omitempty"` // Gap to previous span, or starting point for 1st span (which can be negative). + Length *uint32 `protobuf:"varint,2,opt,name=length" json:"length,omitempty"` // Length of consecutive buckets. } -func (m *BucketSpan) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_BucketSpan.Unmarshal(m, b) -} -func (m *BucketSpan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_BucketSpan.Marshal(b, m, deterministic) -} -func (m *BucketSpan) XXX_Merge(src proto.Message) { - xxx_messageInfo_BucketSpan.Merge(m, src) +func (x *BucketSpan) Reset() { + *x = BucketSpan{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *BucketSpan) XXX_Size() int { - return xxx_messageInfo_BucketSpan.Size(m) + +func (x *BucketSpan) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *BucketSpan) XXX_DiscardUnknown() { - xxx_messageInfo_BucketSpan.DiscardUnknown(m) + +func (*BucketSpan) ProtoMessage() {} + +func (x *BucketSpan) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_BucketSpan proto.InternalMessageInfo +// Deprecated: Use BucketSpan.ProtoReflect.Descriptor instead. 
+func (*BucketSpan) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{8} +} -func (m *BucketSpan) GetOffset() int32 { - if m != nil && m.Offset != nil { - return *m.Offset +func (x *BucketSpan) GetOffset() int32 { + if x != nil && x.Offset != nil { + return *x.Offset } return 0 } -func (m *BucketSpan) GetLength() uint32 { - if m != nil && m.Length != nil { - return *m.Length +func (x *BucketSpan) GetLength() uint32 { + if x != nil && x.Length != nil { + return *x.Length } return 0 } type Exemplar struct { - Label []*LabelPair `protobuf:"bytes,1,rep,name=label" json:"label,omitempty"` - Value *float64 `protobuf:"fixed64,2,opt,name=value" json:"value,omitempty"` - Timestamp *timestamp.Timestamp `protobuf:"bytes,3,opt,name=timestamp" json:"timestamp,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields -func (m *Exemplar) Reset() { *m = Exemplar{} } -func (m *Exemplar) String() string { return proto.CompactTextString(m) } -func (*Exemplar) ProtoMessage() {} -func (*Exemplar) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{9} + Label []*LabelPair `protobuf:"bytes,1,rep,name=label" json:"label,omitempty"` + Value *float64 `protobuf:"fixed64,2,opt,name=value" json:"value,omitempty"` + Timestamp *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=timestamp" json:"timestamp,omitempty"` // OpenMetrics-style. } -func (m *Exemplar) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Exemplar.Unmarshal(m, b) -} -func (m *Exemplar) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Exemplar.Marshal(b, m, deterministic) -} -func (m *Exemplar) XXX_Merge(src proto.Message) { - xxx_messageInfo_Exemplar.Merge(m, src) +func (x *Exemplar) Reset() { + *x = Exemplar{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Exemplar) XXX_Size() int { - return xxx_messageInfo_Exemplar.Size(m) + +func (x *Exemplar) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Exemplar) XXX_DiscardUnknown() { - xxx_messageInfo_Exemplar.DiscardUnknown(m) + +func (*Exemplar) ProtoMessage() {} + +func (x *Exemplar) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_Exemplar proto.InternalMessageInfo +// Deprecated: Use Exemplar.ProtoReflect.Descriptor instead. 
+func (*Exemplar) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{9} +} -func (m *Exemplar) GetLabel() []*LabelPair { - if m != nil { - return m.Label +func (x *Exemplar) GetLabel() []*LabelPair { + if x != nil { + return x.Label } return nil } -func (m *Exemplar) GetValue() float64 { - if m != nil && m.Value != nil { - return *m.Value +func (x *Exemplar) GetValue() float64 { + if x != nil && x.Value != nil { + return *x.Value } return 0 } -func (m *Exemplar) GetTimestamp() *timestamp.Timestamp { - if m != nil { - return m.Timestamp +func (x *Exemplar) GetTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.Timestamp } return nil } type Metric struct { - Label []*LabelPair `protobuf:"bytes,1,rep,name=label" json:"label,omitempty"` - Gauge *Gauge `protobuf:"bytes,2,opt,name=gauge" json:"gauge,omitempty"` - Counter *Counter `protobuf:"bytes,3,opt,name=counter" json:"counter,omitempty"` - Summary *Summary `protobuf:"bytes,4,opt,name=summary" json:"summary,omitempty"` - Untyped *Untyped `protobuf:"bytes,5,opt,name=untyped" json:"untyped,omitempty"` - Histogram *Histogram `protobuf:"bytes,7,opt,name=histogram" json:"histogram,omitempty"` - TimestampMs *int64 `protobuf:"varint,6,opt,name=timestamp_ms,json=timestampMs" json:"timestamp_ms,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Metric) Reset() { *m = Metric{} } -func (m *Metric) String() string { return proto.CompactTextString(m) } -func (*Metric) ProtoMessage() {} -func (*Metric) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{10} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Label []*LabelPair `protobuf:"bytes,1,rep,name=label" json:"label,omitempty"` + Gauge *Gauge `protobuf:"bytes,2,opt,name=gauge" json:"gauge,omitempty"` + Counter *Counter `protobuf:"bytes,3,opt,name=counter" json:"counter,omitempty"` + Summary *Summary `protobuf:"bytes,4,opt,name=summary" json:"summary,omitempty"` + Untyped *Untyped `protobuf:"bytes,5,opt,name=untyped" json:"untyped,omitempty"` + Histogram *Histogram `protobuf:"bytes,7,opt,name=histogram" json:"histogram,omitempty"` + TimestampMs *int64 `protobuf:"varint,6,opt,name=timestamp_ms,json=timestampMs" json:"timestamp_ms,omitempty"` +} + +func (x *Metric) Reset() { + *x = Metric{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *Metric) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Metric.Unmarshal(m, b) +func (x *Metric) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *Metric) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Metric.Marshal(b, m, deterministic) -} -func (m *Metric) XXX_Merge(src proto.Message) { - xxx_messageInfo_Metric.Merge(m, src) -} -func (m *Metric) XXX_Size() int { - return xxx_messageInfo_Metric.Size(m) -} -func (m *Metric) XXX_DiscardUnknown() { - xxx_messageInfo_Metric.DiscardUnknown(m) + +func (*Metric) ProtoMessage() {} + +func (x *Metric) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return 
mi.MessageOf(x) } -var xxx_messageInfo_Metric proto.InternalMessageInfo +// Deprecated: Use Metric.ProtoReflect.Descriptor instead. +func (*Metric) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{10} +} -func (m *Metric) GetLabel() []*LabelPair { - if m != nil { - return m.Label +func (x *Metric) GetLabel() []*LabelPair { + if x != nil { + return x.Label } return nil } -func (m *Metric) GetGauge() *Gauge { - if m != nil { - return m.Gauge +func (x *Metric) GetGauge() *Gauge { + if x != nil { + return x.Gauge } return nil } -func (m *Metric) GetCounter() *Counter { - if m != nil { - return m.Counter +func (x *Metric) GetCounter() *Counter { + if x != nil { + return x.Counter } return nil } -func (m *Metric) GetSummary() *Summary { - if m != nil { - return m.Summary +func (x *Metric) GetSummary() *Summary { + if x != nil { + return x.Summary } return nil } -func (m *Metric) GetUntyped() *Untyped { - if m != nil { - return m.Untyped +func (x *Metric) GetUntyped() *Untyped { + if x != nil { + return x.Untyped } return nil } -func (m *Metric) GetHistogram() *Histogram { - if m != nil { - return m.Histogram +func (x *Metric) GetHistogram() *Histogram { + if x != nil { + return x.Histogram } return nil } -func (m *Metric) GetTimestampMs() int64 { - if m != nil && m.TimestampMs != nil { - return *m.TimestampMs +func (x *Metric) GetTimestampMs() int64 { + if x != nil && x.TimestampMs != nil { + return *x.TimestampMs } return 0 } type MetricFamily struct { - Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Help *string `protobuf:"bytes,2,opt,name=help" json:"help,omitempty"` - Type *MetricType `protobuf:"varint,3,opt,name=type,enum=io.prometheus.client.MetricType" json:"type,omitempty"` - Metric []*Metric `protobuf:"bytes,4,rep,name=metric" json:"metric,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *MetricFamily) Reset() { *m = MetricFamily{} } -func (m *MetricFamily) String() string { return proto.CompactTextString(m) } -func (*MetricFamily) ProtoMessage() {} -func (*MetricFamily) Descriptor() ([]byte, []int) { - return fileDescriptor_d1e5ddb18987a258, []int{11} + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Help *string `protobuf:"bytes,2,opt,name=help" json:"help,omitempty"` + Type *MetricType `protobuf:"varint,3,opt,name=type,enum=io.prometheus.client.MetricType" json:"type,omitempty"` + Metric []*Metric `protobuf:"bytes,4,rep,name=metric" json:"metric,omitempty"` +} + +func (x *MetricFamily) Reset() { + *x = MetricFamily{} + if protoimpl.UnsafeEnabled { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } } -func (m *MetricFamily) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_MetricFamily.Unmarshal(m, b) -} -func (m *MetricFamily) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_MetricFamily.Marshal(b, m, deterministic) -} -func (m *MetricFamily) XXX_Merge(src proto.Message) { - xxx_messageInfo_MetricFamily.Merge(m, src) -} -func (m *MetricFamily) XXX_Size() int { - return xxx_messageInfo_MetricFamily.Size(m) +func (x *MetricFamily) String() string { + return protoimpl.X.MessageStringOf(x) } -func (m *MetricFamily) XXX_DiscardUnknown() { - 
xxx_messageInfo_MetricFamily.DiscardUnknown(m) + +func (*MetricFamily) ProtoMessage() {} + +func (x *MetricFamily) ProtoReflect() protoreflect.Message { + mi := &file_io_prometheus_client_metrics_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -var xxx_messageInfo_MetricFamily proto.InternalMessageInfo +// Deprecated: Use MetricFamily.ProtoReflect.Descriptor instead. +func (*MetricFamily) Descriptor() ([]byte, []int) { + return file_io_prometheus_client_metrics_proto_rawDescGZIP(), []int{11} +} -func (m *MetricFamily) GetName() string { - if m != nil && m.Name != nil { - return *m.Name +func (x *MetricFamily) GetName() string { + if x != nil && x.Name != nil { + return *x.Name } return "" } -func (m *MetricFamily) GetHelp() string { - if m != nil && m.Help != nil { - return *m.Help +func (x *MetricFamily) GetHelp() string { + if x != nil && x.Help != nil { + return *x.Help } return "" } -func (m *MetricFamily) GetType() MetricType { - if m != nil && m.Type != nil { - return *m.Type +func (x *MetricFamily) GetType() MetricType { + if x != nil && x.Type != nil { + return *x.Type } return MetricType_COUNTER } -func (m *MetricFamily) GetMetric() []*Metric { - if m != nil { - return m.Metric +func (x *MetricFamily) GetMetric() []*Metric { + if x != nil { + return x.Metric } return nil } -func init() { - proto.RegisterEnum("io.prometheus.client.MetricType", MetricType_name, MetricType_value) - proto.RegisterType((*LabelPair)(nil), "io.prometheus.client.LabelPair") - proto.RegisterType((*Gauge)(nil), "io.prometheus.client.Gauge") - proto.RegisterType((*Counter)(nil), "io.prometheus.client.Counter") - proto.RegisterType((*Quantile)(nil), "io.prometheus.client.Quantile") - proto.RegisterType((*Summary)(nil), "io.prometheus.client.Summary") - proto.RegisterType((*Untyped)(nil), "io.prometheus.client.Untyped") - proto.RegisterType((*Histogram)(nil), "io.prometheus.client.Histogram") - proto.RegisterType((*Bucket)(nil), "io.prometheus.client.Bucket") - proto.RegisterType((*BucketSpan)(nil), "io.prometheus.client.BucketSpan") - proto.RegisterType((*Exemplar)(nil), "io.prometheus.client.Exemplar") - proto.RegisterType((*Metric)(nil), "io.prometheus.client.Metric") - proto.RegisterType((*MetricFamily)(nil), "io.prometheus.client.MetricFamily") -} - -func init() { - proto.RegisterFile("io/prometheus/client/metrics.proto", fileDescriptor_d1e5ddb18987a258) -} - -var fileDescriptor_d1e5ddb18987a258 = []byte{ - // 896 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xdd, 0x8e, 0xdb, 0x44, - 0x18, 0xc5, 0x9b, 0x5f, 0x7f, 0xd9, 0x6c, 0xd3, 0x61, 0x55, 0x59, 0x0b, 0xcb, 0x06, 0x4b, 0x48, - 0x0b, 0x42, 0x8e, 0x40, 0x5b, 0x81, 0x0a, 0x5c, 0xec, 0xb6, 0xe9, 0x16, 0x89, 0xb4, 0x65, 0x92, - 0x5c, 0x14, 0x2e, 0xac, 0x49, 0x32, 0xeb, 0x58, 0x78, 0x3c, 0xc6, 0x1e, 0x57, 0x2c, 0x2f, 0xc0, - 0x35, 0xaf, 0xc0, 0xc3, 0xf0, 0x22, 0x3c, 0x08, 0x68, 0xfe, 0xec, 0xdd, 0xe2, 0x94, 0xd2, 0x3b, - 0x7f, 0x67, 0xce, 0xf7, 0xcd, 0x39, 0xe3, 0xc9, 0x71, 0xc0, 0x8f, 0xf9, 0x24, 0xcb, 0x39, 0xa3, - 0x62, 0x4b, 0xcb, 0x62, 0xb2, 0x4e, 0x62, 0x9a, 0x8a, 0x09, 0xa3, 0x22, 0x8f, 0xd7, 0x45, 0x90, - 0xe5, 0x5c, 0x70, 0x74, 0x18, 0xf3, 0xa0, 0xe6, 0x04, 0x9a, 0x73, 0x74, 0x12, 0x71, 0x1e, 0x25, - 0x74, 0xa2, 0x38, 0xab, 0xf2, 0x6a, 0x22, 0x62, 0x46, 0x0b, 0x41, 0x58, 0xa6, 0xdb, 0xfc, 0xfb, - 0xe0, 
0x7e, 0x47, 0x56, 0x34, 0x79, 0x4e, 0xe2, 0x1c, 0x21, 0x68, 0xa7, 0x84, 0x51, 0xcf, 0x19, - 0x3b, 0xa7, 0x2e, 0x56, 0xcf, 0xe8, 0x10, 0x3a, 0x2f, 0x49, 0x52, 0x52, 0x6f, 0x4f, 0x81, 0xba, - 0xf0, 0x8f, 0xa1, 0x73, 0x49, 0xca, 0xe8, 0xc6, 0xb2, 0xec, 0x71, 0xec, 0xf2, 0x8f, 0xd0, 0x7b, - 0xc8, 0xcb, 0x54, 0xd0, 0xbc, 0x99, 0x80, 0x1e, 0x40, 0x9f, 0xfe, 0x42, 0x59, 0x96, 0x90, 0x5c, - 0x0d, 0x1e, 0x7c, 0xfe, 0x41, 0xd0, 0x64, 0x20, 0x98, 0x1a, 0x16, 0xae, 0xf8, 0xfe, 0xd7, 0xd0, - 0xff, 0xbe, 0x24, 0xa9, 0x88, 0x13, 0x8a, 0x8e, 0xa0, 0xff, 0xb3, 0x79, 0x36, 0x1b, 0x54, 0xf5, - 0x6d, 0xe5, 0x95, 0xb4, 0xdf, 0x1c, 0xe8, 0xcd, 0x4b, 0xc6, 0x48, 0x7e, 0x8d, 0x3e, 0x84, 0xfd, - 0x82, 0xb0, 0x2c, 0xa1, 0xe1, 0x5a, 0xaa, 0x55, 0x13, 0xda, 0x78, 0xa0, 0x31, 0x65, 0x00, 0x1d, - 0x03, 0x18, 0x4a, 0x51, 0x32, 0x33, 0xc9, 0xd5, 0xc8, 0xbc, 0x64, 0xd2, 0x47, 0xb5, 0x7f, 0x6b, - 0xdc, 0xda, 0xed, 0xc3, 0x2a, 0xae, 0xf5, 0xf9, 0x27, 0xd0, 0x5b, 0xa6, 0xe2, 0x3a, 0xa3, 0x9b, - 0x1d, 0xa7, 0xf8, 0x57, 0x1b, 0xdc, 0x27, 0x71, 0x21, 0x78, 0x94, 0x13, 0xf6, 0x26, 0x62, 0x3f, - 0x05, 0x74, 0x93, 0x12, 0x5e, 0x25, 0x9c, 0x08, 0xaf, 0xad, 0x66, 0x8e, 0x6e, 0x10, 0x1f, 0x4b, - 0xfc, 0xbf, 0xac, 0x9d, 0x41, 0x77, 0x55, 0xae, 0x7f, 0xa2, 0xc2, 0x18, 0x7b, 0xbf, 0xd9, 0xd8, - 0x85, 0xe2, 0x60, 0xc3, 0x45, 0xf7, 0xa0, 0x5b, 0xac, 0xb7, 0x94, 0x11, 0xaf, 0x33, 0x76, 0x4e, - 0xef, 0x62, 0x53, 0xa1, 0x8f, 0xe0, 0xe0, 0x57, 0x9a, 0xf3, 0x50, 0x6c, 0x73, 0x5a, 0x6c, 0x79, - 0xb2, 0xf1, 0xba, 0x6a, 0xc3, 0xa1, 0x44, 0x17, 0x16, 0x94, 0x9a, 0x14, 0x4d, 0x5b, 0xec, 0x29, - 0x8b, 0xae, 0x44, 0xb4, 0xc1, 0x53, 0x18, 0xd5, 0xcb, 0xc6, 0x5e, 0x5f, 0xcd, 0x39, 0xa8, 0x48, - 0xda, 0xdc, 0x14, 0x86, 0x29, 0x8d, 0x88, 0x88, 0x5f, 0xd2, 0xb0, 0xc8, 0x48, 0xea, 0xb9, 0xca, - 0xc4, 0xf8, 0x75, 0x26, 0xe6, 0x19, 0x49, 0xf1, 0xbe, 0x6d, 0x93, 0x95, 0x94, 0x5d, 0x8d, 0xd9, - 0xd0, 0x44, 0x10, 0x0f, 0xc6, 0xad, 0x53, 0x84, 0xab, 0xe1, 0x8f, 0x24, 0x78, 0x8b, 0xa6, 0xa5, - 0x0f, 0xc6, 0x2d, 0xe9, 0xce, 0xa2, 0x5a, 0xfe, 0x14, 0x86, 0x19, 0x2f, 0xe2, 0x5a, 0xd4, 0xfe, - 0x9b, 0x8a, 0xb2, 0x6d, 0x56, 0x54, 0x35, 0x46, 0x8b, 0x1a, 0x6a, 0x51, 0x16, 0xad, 0x44, 0x55, - 0x34, 0x2d, 0xea, 0x40, 0x8b, 0xb2, 0xa8, 0x12, 0xe5, 0xff, 0xe9, 0x40, 0x57, 0x6f, 0x85, 0x3e, - 0x86, 0xd1, 0xba, 0x64, 0x65, 0x72, 0xd3, 0x88, 0xbe, 0x66, 0x77, 0x6a, 0x5c, 0x5b, 0x39, 0x83, - 0x7b, 0xaf, 0x52, 0x6f, 0x5d, 0xb7, 0xc3, 0x57, 0x1a, 0xf4, 0x5b, 0x39, 0x81, 0x41, 0x99, 0x65, - 0x34, 0x0f, 0x57, 0xbc, 0x4c, 0x37, 0xe6, 0xce, 0x81, 0x82, 0x2e, 0x24, 0x72, 0x2b, 0x17, 0x5a, - 0xff, 0x3b, 0x17, 0xa0, 0x3e, 0x32, 0x79, 0x11, 0xf9, 0xd5, 0x55, 0x41, 0xb5, 0x83, 0xbb, 0xd8, - 0x54, 0x12, 0x4f, 0x68, 0x1a, 0x89, 0xad, 0xda, 0x7d, 0x88, 0x4d, 0xe5, 0xff, 0xee, 0x40, 0xdf, - 0x0e, 0x45, 0xf7, 0xa1, 0x93, 0xc8, 0x54, 0xf4, 0x1c, 0xf5, 0x82, 0x4e, 0x9a, 0x35, 0x54, 0xc1, - 0x89, 0x35, 0xbb, 0x39, 0x71, 0xd0, 0x97, 0xe0, 0x56, 0xa9, 0x6b, 0x4c, 0x1d, 0x05, 0x3a, 0x97, - 0x03, 0x9b, 0xcb, 0xc1, 0xc2, 0x32, 0x70, 0x4d, 0xf6, 0xff, 0xde, 0x83, 0xee, 0x4c, 0xa5, 0xfc, - 0xdb, 0x2a, 0xfa, 0x0c, 0x3a, 0x91, 0xcc, 0x69, 0x13, 0xb2, 0xef, 0x35, 0xb7, 0xa9, 0x28, 0xc7, - 0x9a, 0x89, 0xbe, 0x80, 0xde, 0x5a, 0x67, 0xb7, 0x11, 0x7b, 0xdc, 0xdc, 0x64, 0x02, 0x1e, 0x5b, - 0xb6, 0x6c, 0x2c, 0x74, 0xb0, 0xaa, 0x3b, 0xb0, 0xb3, 0xd1, 0xa4, 0x2f, 0xb6, 0x6c, 0xd9, 0x58, - 0xea, 0x20, 0x54, 0xa1, 0xb1, 0xb3, 0xd1, 0xa4, 0x25, 0xb6, 0x6c, 0xf4, 0x0d, 0xb8, 0x5b, 0x9b, - 0x8f, 0x2a, 0x2c, 0x76, 0x1e, 0x4c, 0x15, 0xa3, 0xb8, 0xee, 0x90, 0x89, 0x5a, 0x9d, 0x75, 0xc8, - 0x0a, 0x95, 0x48, 0x2d, 0x3c, 
0xa8, 0xb0, 0x59, 0xe1, 0xff, 0xe1, 0xc0, 0xbe, 0x7e, 0x03, 0x8f, - 0x09, 0x8b, 0x93, 0xeb, 0xc6, 0x4f, 0x24, 0x82, 0xf6, 0x96, 0x26, 0x99, 0xf9, 0x42, 0xaa, 0x67, - 0x74, 0x06, 0x6d, 0xa9, 0x51, 0x1d, 0xe1, 0xc1, 0xae, 0x5f, 0xb8, 0x9e, 0xbc, 0xb8, 0xce, 0x28, - 0x56, 0x6c, 0x99, 0xb9, 0xfa, 0xab, 0xee, 0xb5, 0x5f, 0x97, 0xb9, 0xba, 0x0f, 0x1b, 0xee, 0x27, - 0x2b, 0x80, 0x7a, 0x12, 0x1a, 0x40, 0xef, 0xe1, 0xb3, 0xe5, 0xd3, 0xc5, 0x14, 0x8f, 0xde, 0x41, - 0x2e, 0x74, 0x2e, 0xcf, 0x97, 0x97, 0xd3, 0x91, 0x23, 0xf1, 0xf9, 0x72, 0x36, 0x3b, 0xc7, 0x2f, - 0x46, 0x7b, 0xb2, 0x58, 0x3e, 0x5d, 0xbc, 0x78, 0x3e, 0x7d, 0x34, 0x6a, 0xa1, 0x21, 0xb8, 0x4f, - 0xbe, 0x9d, 0x2f, 0x9e, 0x5d, 0xe2, 0xf3, 0xd9, 0xa8, 0x8d, 0xde, 0x85, 0x3b, 0xaa, 0x27, 0xac, - 0xc1, 0xce, 0x05, 0x86, 0xc6, 0x3f, 0x18, 0x3f, 0x3c, 0x88, 0x62, 0xb1, 0x2d, 0x57, 0xc1, 0x9a, - 0xb3, 0x7f, 0xff, 0x45, 0x09, 0x19, 0xdf, 0xd0, 0x64, 0x12, 0xf1, 0xaf, 0x62, 0x1e, 0xd6, 0xab, - 0xa1, 0x5e, 0xfd, 0x27, 0x00, 0x00, 0xff, 0xff, 0x16, 0x77, 0x81, 0x98, 0xd7, 0x08, 0x00, 0x00, +var File_io_prometheus_client_metrics_proto protoreflect.FileDescriptor + +var file_io_prometheus_client_metrics_proto_rawDesc = []byte{ + 0x0a, 0x22, 0x69, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2f, + 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x14, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, + 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x35, 0x0a, 0x09, 0x4c, + 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x22, 0x1d, 0x0a, 0x05, 0x47, 0x61, 0x75, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x22, 0x5b, 0x0a, 0x07, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, + 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x22, 0x3c, + 0x0a, 0x08, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x71, 0x75, + 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x08, 0x71, 0x75, + 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x87, 0x01, 0x0a, + 0x07, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, + 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, + 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, + 0x61, 0x6d, 
0x70, 0x6c, 0x65, 0x5f, 0x73, 0x75, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, + 0x09, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x53, 0x75, 0x6d, 0x12, 0x3a, 0x0a, 0x08, 0x71, 0x75, + 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, + 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, + 0x65, 0x6e, 0x74, 0x2e, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x52, 0x08, 0x71, 0x75, + 0x61, 0x6e, 0x74, 0x69, 0x6c, 0x65, 0x22, 0x1f, 0x0a, 0x07, 0x55, 0x6e, 0x74, 0x79, 0x70, 0x65, + 0x64, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xe3, 0x04, 0x0a, 0x09, 0x48, 0x69, 0x73, 0x74, + 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, + 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x73, 0x61, 0x6d, + 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2c, 0x0a, 0x12, 0x73, 0x61, 0x6d, 0x70, + 0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x01, 0x52, 0x10, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, + 0x74, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, + 0x5f, 0x73, 0x75, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x73, 0x61, 0x6d, 0x70, + 0x6c, 0x65, 0x53, 0x75, 0x6d, 0x12, 0x34, 0x0a, 0x06, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x18, + 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, + 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x42, 0x75, 0x63, + 0x6b, 0x65, 0x74, 0x52, 0x06, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x11, 0x52, 0x06, 0x73, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x7a, 0x65, 0x72, 0x6f, 0x5f, 0x74, 0x68, 0x72, 0x65, + 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0d, 0x7a, 0x65, 0x72, + 0x6f, 0x54, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x7a, 0x65, + 0x72, 0x6f, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, + 0x7a, 0x65, 0x72, 0x6f, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x7a, 0x65, 0x72, + 0x6f, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x08, 0x20, + 0x01, 0x28, 0x01, 0x52, 0x0e, 0x7a, 0x65, 0x72, 0x6f, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c, + 0x6f, 0x61, 0x74, 0x12, 0x45, 0x0a, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, + 0x73, 0x70, 0x61, 0x6e, 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x69, 0x6f, 0x2e, + 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, + 0x74, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x0c, 0x6e, 0x65, + 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x6e, 0x65, + 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x74, 0x61, 0x18, 0x0a, 0x20, 0x03, + 0x28, 0x12, 0x52, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x6c, 0x74, + 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f, + 0x75, 0x6e, 0x74, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, 0x6e, 0x65, 0x67, 0x61, 0x74, + 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 
0x6e, 0x74, 0x12, 0x45, 0x0a, 0x0d, 0x70, 0x6f, 0x73, 0x69, + 0x74, 0x69, 0x76, 0x65, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x20, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, + 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, + 0x6e, 0x52, 0x0c, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x12, + 0x25, 0x0a, 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x64, 0x65, 0x6c, 0x74, + 0x61, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x12, 0x52, 0x0d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, + 0x65, 0x44, 0x65, 0x6c, 0x74, 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xc6, 0x01, + 0x0a, 0x06, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x75, 0x6d, 0x75, + 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x04, 0x52, 0x0f, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, + 0x75, 0x6e, 0x74, 0x12, 0x34, 0x0a, 0x16, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, + 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x01, 0x52, 0x14, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, + 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x75, 0x70, 0x70, + 0x65, 0x72, 0x5f, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, + 0x75, 0x70, 0x70, 0x65, 0x72, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, + 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, + 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x22, 0x3c, 0x0a, 0x0a, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, + 0x53, 0x70, 0x61, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x11, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x22, 0x91, 0x01, 0x0a, 0x08, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x72, 0x12, 0x35, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, + 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, + 0x72, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x38, + 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0xff, 0x02, 0x0a, 0x06, 0x4d, 0x65, 0x74, + 0x72, 0x69, 0x63, 0x12, 0x35, 0x0a, 0x05, 0x6c, 0x61, 0x62, 
0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, + 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, + 0x61, 0x69, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x31, 0x0a, 0x05, 0x67, 0x61, + 0x75, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x69, 0x6f, 0x2e, 0x70, + 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, + 0x2e, 0x47, 0x61, 0x75, 0x67, 0x65, 0x52, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x12, 0x37, 0x0a, + 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, + 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, + 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x52, 0x07, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x12, 0x37, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, + 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, + 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x53, + 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, + 0x37, 0x0a, 0x07, 0x75, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, + 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x55, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x52, + 0x07, 0x75, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x12, 0x3d, 0x0a, 0x09, 0x68, 0x69, 0x73, 0x74, + 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, + 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, + 0x6e, 0x74, 0x2e, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x09, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x6d, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4d, 0x73, 0x22, 0xa2, 0x01, 0x0a, 0x0c, 0x4d, + 0x65, 0x74, 0x72, 0x69, 0x63, 0x46, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x68, 0x65, 0x6c, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, + 0x65, 0x6c, 0x70, 0x12, 0x34, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x20, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, + 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, + 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x34, 0x0a, 0x06, 0x6d, 0x65, 0x74, + 0x72, 0x69, 0x63, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, + 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, + 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x2a, + 0x62, 0x0a, 0x0a, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, + 0x07, 0x43, 0x4f, 0x55, 0x4e, 0x54, 0x45, 0x52, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x47, 0x41, + 0x55, 0x47, 0x45, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x55, 0x4d, 0x4d, 0x41, 
0x52, 0x59, + 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x54, 0x59, 0x50, 0x45, 0x44, 0x10, 0x03, 0x12, + 0x0d, 0x0a, 0x09, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10, 0x04, 0x12, 0x13, + 0x0a, 0x0f, 0x47, 0x41, 0x55, 0x47, 0x45, 0x5f, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, + 0x4d, 0x10, 0x05, 0x42, 0x52, 0x0a, 0x14, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, + 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5a, 0x3a, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, + 0x75, 0x73, 0x2f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2f, + 0x67, 0x6f, 0x3b, 0x69, 0x6f, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, + 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, +} + +var ( + file_io_prometheus_client_metrics_proto_rawDescOnce sync.Once + file_io_prometheus_client_metrics_proto_rawDescData = file_io_prometheus_client_metrics_proto_rawDesc +) + +func file_io_prometheus_client_metrics_proto_rawDescGZIP() []byte { + file_io_prometheus_client_metrics_proto_rawDescOnce.Do(func() { + file_io_prometheus_client_metrics_proto_rawDescData = protoimpl.X.CompressGZIP(file_io_prometheus_client_metrics_proto_rawDescData) + }) + return file_io_prometheus_client_metrics_proto_rawDescData +} + +var file_io_prometheus_client_metrics_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_io_prometheus_client_metrics_proto_msgTypes = make([]protoimpl.MessageInfo, 12) +var file_io_prometheus_client_metrics_proto_goTypes = []interface{}{ + (MetricType)(0), // 0: io.prometheus.client.MetricType + (*LabelPair)(nil), // 1: io.prometheus.client.LabelPair + (*Gauge)(nil), // 2: io.prometheus.client.Gauge + (*Counter)(nil), // 3: io.prometheus.client.Counter + (*Quantile)(nil), // 4: io.prometheus.client.Quantile + (*Summary)(nil), // 5: io.prometheus.client.Summary + (*Untyped)(nil), // 6: io.prometheus.client.Untyped + (*Histogram)(nil), // 7: io.prometheus.client.Histogram + (*Bucket)(nil), // 8: io.prometheus.client.Bucket + (*BucketSpan)(nil), // 9: io.prometheus.client.BucketSpan + (*Exemplar)(nil), // 10: io.prometheus.client.Exemplar + (*Metric)(nil), // 11: io.prometheus.client.Metric + (*MetricFamily)(nil), // 12: io.prometheus.client.MetricFamily + (*timestamppb.Timestamp)(nil), // 13: google.protobuf.Timestamp +} +var file_io_prometheus_client_metrics_proto_depIdxs = []int32{ + 10, // 0: io.prometheus.client.Counter.exemplar:type_name -> io.prometheus.client.Exemplar + 4, // 1: io.prometheus.client.Summary.quantile:type_name -> io.prometheus.client.Quantile + 8, // 2: io.prometheus.client.Histogram.bucket:type_name -> io.prometheus.client.Bucket + 9, // 3: io.prometheus.client.Histogram.negative_span:type_name -> io.prometheus.client.BucketSpan + 9, // 4: io.prometheus.client.Histogram.positive_span:type_name -> io.prometheus.client.BucketSpan + 10, // 5: io.prometheus.client.Bucket.exemplar:type_name -> io.prometheus.client.Exemplar + 1, // 6: io.prometheus.client.Exemplar.label:type_name -> io.prometheus.client.LabelPair + 13, // 7: io.prometheus.client.Exemplar.timestamp:type_name -> google.protobuf.Timestamp + 1, // 8: io.prometheus.client.Metric.label:type_name -> io.prometheus.client.LabelPair + 2, // 9: io.prometheus.client.Metric.gauge:type_name -> io.prometheus.client.Gauge + 3, // 10: io.prometheus.client.Metric.counter:type_name -> io.prometheus.client.Counter + 5, // 11: io.prometheus.client.Metric.summary:type_name -> 
io.prometheus.client.Summary + 6, // 12: io.prometheus.client.Metric.untyped:type_name -> io.prometheus.client.Untyped + 7, // 13: io.prometheus.client.Metric.histogram:type_name -> io.prometheus.client.Histogram + 0, // 14: io.prometheus.client.MetricFamily.type:type_name -> io.prometheus.client.MetricType + 11, // 15: io.prometheus.client.MetricFamily.metric:type_name -> io.prometheus.client.Metric + 16, // [16:16] is the sub-list for method output_type + 16, // [16:16] is the sub-list for method input_type + 16, // [16:16] is the sub-list for extension type_name + 16, // [16:16] is the sub-list for extension extendee + 0, // [0:16] is the sub-list for field type_name +} + +func init() { file_io_prometheus_client_metrics_proto_init() } +func file_io_prometheus_client_metrics_proto_init() { + if File_io_prometheus_client_metrics_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_io_prometheus_client_metrics_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LabelPair); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Gauge); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Counter); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Quantile); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Summary); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Untyped); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Histogram); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Bucket); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BucketSpan); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Exemplar); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + 
return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Metric); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_io_prometheus_client_metrics_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MetricFamily); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_io_prometheus_client_metrics_proto_rawDesc, + NumEnums: 1, + NumMessages: 12, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_io_prometheus_client_metrics_proto_goTypes, + DependencyIndexes: file_io_prometheus_client_metrics_proto_depIdxs, + EnumInfos: file_io_prometheus_client_metrics_proto_enumTypes, + MessageInfos: file_io_prometheus_client_metrics_proto_msgTypes, + }.Build() + File_io_prometheus_client_metrics_proto = out.File + file_io_prometheus_client_metrics_proto_rawDesc = nil + file_io_prometheus_client_metrics_proto_goTypes = nil + file_io_prometheus_client_metrics_proto_depIdxs = nil } diff --git a/vendor/github.com/prometheus/common/expfmt/decode.go b/vendor/github.com/prometheus/common/expfmt/decode.go index 7657f841..90639781 100644 --- a/vendor/github.com/prometheus/common/expfmt/decode.go +++ b/vendor/github.com/prometheus/common/expfmt/decode.go @@ -115,32 +115,31 @@ func (d *protoDecoder) Decode(v *dto.MetricFamily) error { // textDecoder implements the Decoder interface for the text protocol. type textDecoder struct { r io.Reader - p TextParser - fams []*dto.MetricFamily + fams map[string]*dto.MetricFamily + err error } // Decode implements the Decoder interface. func (d *textDecoder) Decode(v *dto.MetricFamily) error { - // TODO(fabxc): Wrap this as a line reader to make streaming safer. - if len(d.fams) == 0 { - // No cached metric families, read everything and parse metrics. - fams, err := d.p.TextToMetricFamilies(d.r) - if err != nil { - return err - } - if len(fams) == 0 { - return io.EOF - } - d.fams = make([]*dto.MetricFamily, 0, len(fams)) - for _, f := range fams { - d.fams = append(d.fams, f) + if d.err == nil { + // Read all metrics in one shot. + var p TextParser + d.fams, d.err = p.TextToMetricFamilies(d.r) + // If we don't get an error, store io.EOF for the end. + if d.err == nil { + d.err = io.EOF } } - - *v = *d.fams[0] - d.fams = d.fams[1:] - - return nil + // Pick off one MetricFamily per Decode until there's nothing left. + for key, fam := range d.fams { + v.Name = fam.Name + v.Help = fam.Help + v.Type = fam.Type + v.Metric = fam.Metric + delete(d.fams, key) + return nil + } + return d.err } // SampleDecoder wraps a Decoder to extract samples from the metric families diff --git a/vendor/github.com/prometheus/common/expfmt/encode.go b/vendor/github.com/prometheus/common/expfmt/encode.go index 64dc0eb4..7f611ffa 100644 --- a/vendor/github.com/prometheus/common/expfmt/encode.go +++ b/vendor/github.com/prometheus/common/expfmt/encode.go @@ -18,9 +18,9 @@ import ( "io" "net/http" - "github.com/golang/protobuf/proto" //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. 
"github.com/matttproud/golang_protobuf_extensions/pbutil" "github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg" + "google.golang.org/protobuf/encoding/prototext" dto "github.com/prometheus/client_model/go" ) @@ -99,8 +99,11 @@ func NegotiateIncludingOpenMetrics(h http.Header) Format { if ac.Type == "text" && ac.SubType == "plain" && (ver == TextVersion || ver == "") { return FmtText } - if ac.Type+"/"+ac.SubType == OpenMetricsType && (ver == OpenMetricsVersion || ver == "") { - return FmtOpenMetrics + if ac.Type+"/"+ac.SubType == OpenMetricsType && (ver == OpenMetricsVersion_0_0_1 || ver == OpenMetricsVersion_1_0_0 || ver == "") { + if ver == OpenMetricsVersion_1_0_0 { + return FmtOpenMetrics_1_0_0 + } + return FmtOpenMetrics_0_0_1 } } return FmtText @@ -133,7 +136,7 @@ func NewEncoder(w io.Writer, format Format) Encoder { case FmtProtoText: return encoderCloser{ encode: func(v *dto.MetricFamily) error { - _, err := fmt.Fprintln(w, proto.MarshalTextString(v)) + _, err := fmt.Fprintln(w, prototext.Format(v)) return err }, close: func() error { return nil }, @@ -146,7 +149,7 @@ func NewEncoder(w io.Writer, format Format) Encoder { }, close: func() error { return nil }, } - case FmtOpenMetrics: + case FmtOpenMetrics_0_0_1, FmtOpenMetrics_1_0_0: return encoderCloser{ encode: func(v *dto.MetricFamily) error { _, err := MetricFamilyToOpenMetrics(w, v) diff --git a/vendor/github.com/prometheus/common/expfmt/expfmt.go b/vendor/github.com/prometheus/common/expfmt/expfmt.go index 0f176fa6..c4cb20f0 100644 --- a/vendor/github.com/prometheus/common/expfmt/expfmt.go +++ b/vendor/github.com/prometheus/common/expfmt/expfmt.go @@ -19,20 +19,22 @@ type Format string // Constants to assemble the Content-Type values for the different wire protocols. const ( - TextVersion = "0.0.4" - ProtoType = `application/vnd.google.protobuf` - ProtoProtocol = `io.prometheus.client.MetricFamily` - ProtoFmt = ProtoType + "; proto=" + ProtoProtocol + ";" - OpenMetricsType = `application/openmetrics-text` - OpenMetricsVersion = "0.0.1" + TextVersion = "0.0.4" + ProtoType = `application/vnd.google.protobuf` + ProtoProtocol = `io.prometheus.client.MetricFamily` + ProtoFmt = ProtoType + "; proto=" + ProtoProtocol + ";" + OpenMetricsType = `application/openmetrics-text` + OpenMetricsVersion_0_0_1 = "0.0.1" + OpenMetricsVersion_1_0_0 = "1.0.0" // The Content-Type values for the different wire protocols. 
- FmtUnknown Format = `` - FmtText Format = `text/plain; version=` + TextVersion + `; charset=utf-8` - FmtProtoDelim Format = ProtoFmt + ` encoding=delimited` - FmtProtoText Format = ProtoFmt + ` encoding=text` - FmtProtoCompact Format = ProtoFmt + ` encoding=compact-text` - FmtOpenMetrics Format = OpenMetricsType + `; version=` + OpenMetricsVersion + `; charset=utf-8` + FmtUnknown Format = `` + FmtText Format = `text/plain; version=` + TextVersion + `; charset=utf-8` + FmtProtoDelim Format = ProtoFmt + ` encoding=delimited` + FmtProtoText Format = ProtoFmt + ` encoding=text` + FmtProtoCompact Format = ProtoFmt + ` encoding=compact-text` + FmtOpenMetrics_1_0_0 Format = OpenMetricsType + `; version=` + OpenMetricsVersion_1_0_0 + `; charset=utf-8` + FmtOpenMetrics_0_0_1 Format = OpenMetricsType + `; version=` + OpenMetricsVersion_0_0_1 + `; charset=utf-8` ) const ( diff --git a/vendor/github.com/prometheus/common/expfmt/fuzz.go b/vendor/github.com/prometheus/common/expfmt/fuzz.go index f819e4f8..dfac962a 100644 --- a/vendor/github.com/prometheus/common/expfmt/fuzz.go +++ b/vendor/github.com/prometheus/common/expfmt/fuzz.go @@ -21,8 +21,8 @@ import "bytes" // Fuzz text metric parser with with github.com/dvyukov/go-fuzz: // -// go-fuzz-build github.com/prometheus/common/expfmt -// go-fuzz -bin expfmt-fuzz.zip -workdir fuzz +// go-fuzz-build github.com/prometheus/common/expfmt +// go-fuzz -bin expfmt-fuzz.zip -workdir fuzz // // Further input samples should go in the folder fuzz/corpus. func Fuzz(in []byte) int { diff --git a/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go b/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go index 9d94ae9e..21cdddcf 100644 --- a/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go +++ b/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go @@ -46,20 +46,20 @@ import ( // missing features and peculiarities to avoid complications when switching from // Prometheus to OpenMetrics or vice versa: // -// - Counters are expected to have the `_total` suffix in their metric name. In -// the output, the suffix will be truncated from the `# TYPE` and `# HELP` -// line. A counter with a missing `_total` suffix is not an error. However, -// its type will be set to `unknown` in that case to avoid invalid OpenMetrics -// output. +// - Counters are expected to have the `_total` suffix in their metric name. In +// the output, the suffix will be truncated from the `# TYPE` and `# HELP` +// line. A counter with a missing `_total` suffix is not an error. However, +// its type will be set to `unknown` in that case to avoid invalid OpenMetrics +// output. // -// - No support for the following (optional) features: `# UNIT` line, `_created` -// line, info type, stateset type, gaugehistogram type. +// - No support for the following (optional) features: `# UNIT` line, `_created` +// line, info type, stateset type, gaugehistogram type. // -// - The size of exemplar labels is not checked (i.e. it's possible to create -// exemplars that are larger than allowed by the OpenMetrics specification). +// - The size of exemplar labels is not checked (i.e. it's possible to create +// exemplars that are larger than allowed by the OpenMetrics specification). // -// - The value of Counters is not checked. (OpenMetrics doesn't allow counters -// with a `NaN` value.) +// - The value of Counters is not checked. (OpenMetrics doesn't allow counters +// with a `NaN` value.) 
func MetricFamilyToOpenMetrics(out io.Writer, in *dto.MetricFamily) (written int, err error) { name := in.GetName() if name == "" { diff --git a/vendor/github.com/prometheus/common/expfmt/text_parse.go b/vendor/github.com/prometheus/common/expfmt/text_parse.go index 84be0643..35db1cc9 100644 --- a/vendor/github.com/prometheus/common/expfmt/text_parse.go +++ b/vendor/github.com/prometheus/common/expfmt/text_parse.go @@ -24,8 +24,8 @@ import ( dto "github.com/prometheus/client_model/go" - "github.com/golang/protobuf/proto" //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. "github.com/prometheus/common/model" + "google.golang.org/protobuf/proto" ) // A stateFn is a function that represents a state in a state machine. By @@ -142,9 +142,13 @@ func (p *TextParser) reset(in io.Reader) { func (p *TextParser) startOfLine() stateFn { p.lineCount++ if p.skipBlankTab(); p.err != nil { - // End of input reached. This is the only case where - // that is not an error but a signal that we are done. - p.err = nil + // This is the only place that we expect to see io.EOF, + // which is not an error but the signal that we are done. + // Any other error that happens to align with the start of + // a line is still an error. + if p.err == io.EOF { + p.err = nil + } return nil } switch p.currentByte { diff --git a/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/autoneg.go b/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/autoneg.go index 26e92288..a21b9d15 100644 --- a/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/autoneg.go +++ b/vendor/github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg/autoneg.go @@ -11,18 +11,18 @@ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. + Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. + Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. - Neither the name of the Open Knowledge Foundation Ltd. nor the - names of its contributors may be used to endorse or promote - products derived from this software without specific prior written - permission. + Neither the name of the Open Knowledge Foundation Ltd. nor the + names of its contributors may be used to endorse or promote + products derived from this software without specific prior written + permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT @@ -35,8 +35,6 @@ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- - */ package goautoneg diff --git a/vendor/github.com/prometheus/common/model/time.go b/vendor/github.com/prometheus/common/model/time.go index c909b8aa..5727452c 100644 --- a/vendor/github.com/prometheus/common/model/time.go +++ b/vendor/github.com/prometheus/common/model/time.go @@ -18,7 +18,6 @@ import ( "errors" "fmt" "math" - "regexp" "strconv" "strings" "time" @@ -183,54 +182,78 @@ func (d *Duration) Type() string { return "duration" } -var durationRE = regexp.MustCompile("^(([0-9]+)y)?(([0-9]+)w)?(([0-9]+)d)?(([0-9]+)h)?(([0-9]+)m)?(([0-9]+)s)?(([0-9]+)ms)?$") +func isdigit(c byte) bool { return c >= '0' && c <= '9' } + +// Units are required to go in order from biggest to smallest. +// This guards against confusion from "1m1d" being 1 minute + 1 day, not 1 month + 1 day. +var unitMap = map[string]struct { + pos int + mult uint64 +}{ + "ms": {7, uint64(time.Millisecond)}, + "s": {6, uint64(time.Second)}, + "m": {5, uint64(time.Minute)}, + "h": {4, uint64(time.Hour)}, + "d": {3, uint64(24 * time.Hour)}, + "w": {2, uint64(7 * 24 * time.Hour)}, + "y": {1, uint64(365 * 24 * time.Hour)}, +} // ParseDuration parses a string into a time.Duration, assuming that a year // always has 365d, a week always has 7d, and a day always has 24h. -func ParseDuration(durationStr string) (Duration, error) { - switch durationStr { +func ParseDuration(s string) (Duration, error) { + switch s { case "0": // Allow 0 without a unit. return 0, nil case "": return 0, errors.New("empty duration string") } - matches := durationRE.FindStringSubmatch(durationStr) - if matches == nil { - return 0, fmt.Errorf("not a valid duration string: %q", durationStr) - } - var dur time.Duration - // Parse the match at pos `pos` in the regex and use `mult` to turn that - // into ms, then add that value to the total parsed duration. - var overflowErr error - m := func(pos int, mult time.Duration) { - if matches[pos] == "" { - return + orig := s + var dur uint64 + lastUnitPos := 0 + + for s != "" { + if !isdigit(s[0]) { + return 0, fmt.Errorf("not a valid duration string: %q", orig) + } + // Consume [0-9]* + i := 0 + for ; i < len(s) && isdigit(s[i]); i++ { + } + v, err := strconv.ParseUint(s[:i], 10, 0) + if err != nil { + return 0, fmt.Errorf("not a valid duration string: %q", orig) } - n, _ := strconv.Atoi(matches[pos]) + s = s[i:] + // Consume unit. + for i = 0; i < len(s) && !isdigit(s[i]); i++ { + } + if i == 0 { + return 0, fmt.Errorf("not a valid duration string: %q", orig) + } + u := s[:i] + s = s[i:] + unit, ok := unitMap[u] + if !ok { + return 0, fmt.Errorf("unknown unit %q in duration %q", u, orig) + } + if unit.pos <= lastUnitPos { // Units must go in order from biggest to smallest. + return 0, fmt.Errorf("not a valid duration string: %q", orig) + } + lastUnitPos = unit.pos // Check if the provided duration overflows time.Duration (> ~ 290years). 
- if n > int((1<<63-1)/mult/time.Millisecond) { - overflowErr = errors.New("duration out of range") + if v > 1<<63/unit.mult { + return 0, errors.New("duration out of range") } - d := time.Duration(n) * time.Millisecond - dur += d * mult - - if dur < 0 { - overflowErr = errors.New("duration out of range") + dur += v * unit.mult + if dur > 1<<63-1 { + return 0, errors.New("duration out of range") } } - - m(2, 1000*60*60*24*365) // y - m(4, 1000*60*60*24*7) // w - m(6, 1000*60*60*24) // d - m(8, 1000*60*60) // h - m(10, 1000*60) // m - m(12, 1000) // s - m(14, 1) // ms - - return Duration(dur), overflowErr + return Duration(dur), nil } func (d Duration) String() string { diff --git a/vendor/github.com/prometheus/common/model/value.go b/vendor/github.com/prometheus/common/model/value.go index c9d8fb1a..9eb44041 100644 --- a/vendor/github.com/prometheus/common/model/value.go +++ b/vendor/github.com/prometheus/common/model/value.go @@ -16,20 +16,12 @@ package model import ( "encoding/json" "fmt" - "math" "sort" "strconv" "strings" ) var ( - // ZeroSamplePair is the pseudo zero-value of SamplePair used to signal a - // non-existing sample pair. It is a SamplePair with timestamp Earliest and - // value 0.0. Note that the natural zero value of SamplePair has a timestamp - // of 0, which is possible to appear in a real SamplePair and thus not - // suitable to signal a non-existing SamplePair. - ZeroSamplePair = SamplePair{Timestamp: Earliest} - // ZeroSample is the pseudo zero-value of Sample used to signal a // non-existing sample. It is a Sample with timestamp Earliest, value 0.0, // and metric nil. Note that the natural zero value of Sample has a timestamp @@ -38,82 +30,14 @@ var ( ZeroSample = Sample{Timestamp: Earliest} ) -// A SampleValue is a representation of a value for a given sample at a given -// time. -type SampleValue float64 - -// MarshalJSON implements json.Marshaler. -func (v SampleValue) MarshalJSON() ([]byte, error) { - return json.Marshal(v.String()) -} - -// UnmarshalJSON implements json.Unmarshaler. -func (v *SampleValue) UnmarshalJSON(b []byte) error { - if len(b) < 2 || b[0] != '"' || b[len(b)-1] != '"' { - return fmt.Errorf("sample value must be a quoted string") - } - f, err := strconv.ParseFloat(string(b[1:len(b)-1]), 64) - if err != nil { - return err - } - *v = SampleValue(f) - return nil -} - -// Equal returns true if the value of v and o is equal or if both are NaN. Note -// that v==o is false if both are NaN. If you want the conventional float -// behavior, use == to compare two SampleValues. -func (v SampleValue) Equal(o SampleValue) bool { - if v == o { - return true - } - return math.IsNaN(float64(v)) && math.IsNaN(float64(o)) -} - -func (v SampleValue) String() string { - return strconv.FormatFloat(float64(v), 'f', -1, 64) -} - -// SamplePair pairs a SampleValue with a Timestamp. -type SamplePair struct { - Timestamp Time - Value SampleValue -} - -// MarshalJSON implements json.Marshaler. -func (s SamplePair) MarshalJSON() ([]byte, error) { - t, err := json.Marshal(s.Timestamp) - if err != nil { - return nil, err - } - v, err := json.Marshal(s.Value) - if err != nil { - return nil, err - } - return []byte(fmt.Sprintf("[%s,%s]", t, v)), nil -} - -// UnmarshalJSON implements json.Unmarshaler. -func (s *SamplePair) UnmarshalJSON(b []byte) error { - v := [...]json.Unmarshaler{&s.Timestamp, &s.Value} - return json.Unmarshal(b, &v) -} - -// Equal returns true if this SamplePair and o have equal Values and equal -// Timestamps. 
The semantics of Value equality is defined by SampleValue.Equal. -func (s *SamplePair) Equal(o *SamplePair) bool { - return s == o || (s.Value.Equal(o.Value) && s.Timestamp.Equal(o.Timestamp)) -} - -func (s SamplePair) String() string { - return fmt.Sprintf("%s @[%s]", s.Value, s.Timestamp) -} - -// Sample is a sample pair associated with a metric. +// Sample is a sample pair associated with a metric. A single sample must either +// define Value or Histogram but not both. Histogram == nil implies the Value +// field is used, otherwise it should be ignored. type Sample struct { - Metric Metric `json:"metric"` - Value SampleValue `json:"value"` - Timestamp Time `json:"timestamp"` + Metric Metric `json:"metric"` + Value SampleValue `json:"value"` + Timestamp Time `json:"timestamp"` + Histogram *SampleHistogram `json:"histogram"` } // Equal compares first the metrics, then the timestamp, then the value. The @@ -129,11 +53,19 @@ func (s *Sample) Equal(o *Sample) bool { if !s.Timestamp.Equal(o.Timestamp) { return false } - + if s.Histogram != nil { + return s.Histogram.Equal(o.Histogram) + } return s.Value.Equal(o.Value) } func (s Sample) String() string { + if s.Histogram != nil { + return fmt.Sprintf("%s => %s", s.Metric, SampleHistogramPair{ + Timestamp: s.Timestamp, + Histogram: s.Histogram, + }) + } return fmt.Sprintf("%s => %s", s.Metric, SamplePair{ Timestamp: s.Timestamp, Value: s.Value, @@ -142,6 +74,19 @@ func (s Sample) String() string { // MarshalJSON implements json.Marshaler. func (s Sample) MarshalJSON() ([]byte, error) { + if s.Histogram != nil { + v := struct { + Metric Metric `json:"metric"` + Histogram SampleHistogramPair `json:"histogram"` + }{ + Metric: s.Metric, + Histogram: SampleHistogramPair{ + Timestamp: s.Timestamp, + Histogram: s.Histogram, + }, + } + return json.Marshal(&v) + } v := struct { Metric Metric `json:"metric"` Value SamplePair `json:"value"` @@ -152,21 +97,25 @@ func (s Sample) MarshalJSON() ([]byte, error) { Value: s.Value, }, } - return json.Marshal(&v) } // UnmarshalJSON implements json.Unmarshaler. func (s *Sample) UnmarshalJSON(b []byte) error { v := struct { - Metric Metric `json:"metric"` - Value SamplePair `json:"value"` + Metric Metric `json:"metric"` + Value SamplePair `json:"value"` + Histogram SampleHistogramPair `json:"histogram"` }{ Metric: s.Metric, Value: SamplePair{ Timestamp: s.Timestamp, Value: s.Value, }, + Histogram: SampleHistogramPair{ + Timestamp: s.Timestamp, + Histogram: s.Histogram, + }, } if err := json.Unmarshal(b, &v); err != nil { @@ -174,8 +123,13 @@ func (s *Sample) UnmarshalJSON(b []byte) error { } s.Metric = v.Metric - s.Timestamp = v.Value.Timestamp - s.Value = v.Value.Value + if v.Histogram.Histogram != nil { + s.Timestamp = v.Histogram.Timestamp + s.Histogram = v.Histogram.Histogram + } else { + s.Timestamp = v.Value.Timestamp + s.Value = v.Value.Value + } return nil } @@ -221,80 +175,76 @@ func (s Samples) Equal(o Samples) bool { // SampleStream is a stream of Values belonging to an attached COWMetric. 
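Aside on the Sample change above: Value and Histogram are now mutually exclusive, and a non-nil Histogram takes precedence in Equal, String and the JSON (un)marshalers. A minimal sketch (not taken from this patch) that prints both JSON shapes, assuming the vendored package is imported as github.com/prometheus/common/model:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	// Classic float sample: Histogram is nil, so it marshals under "value".
	f := model.Sample{
		Metric:    model.Metric{"__name__": "http_requests_total"},
		Value:     42,
		Timestamp: model.Time(1234567890123),
	}
	// Native-histogram sample: Histogram is non-nil, Value is ignored and the
	// sample marshals under "histogram" instead.
	h := model.Sample{
		Metric:    model.Metric{"__name__": "request_duration_seconds"},
		Timestamp: model.Time(1234567890123),
		Histogram: &model.SampleHistogram{
			Count: 2,
			Sum:   3.5,
			Buckets: model.HistogramBuckets{
				{Boundaries: 0, Lower: 0, Upper: 2, Count: 1},
				{Boundaries: 0, Lower: 2, Upper: 4, Count: 1},
			},
		},
	}
	for _, s := range []model.Sample{f, h} {
		b, err := json.Marshal(s)
		if err != nil {
			panic(err)
		}
		fmt.Println(string(b))
	}
}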
type SampleStream struct { - Metric Metric `json:"metric"` - Values []SamplePair `json:"values"` + Metric Metric `json:"metric"` + Values []SamplePair `json:"values"` + Histograms []SampleHistogramPair `json:"histograms"` } func (ss SampleStream) String() string { - vals := make([]string, len(ss.Values)) + valuesLength := len(ss.Values) + vals := make([]string, valuesLength+len(ss.Histograms)) for i, v := range ss.Values { vals[i] = v.String() } + for i, v := range ss.Histograms { + vals[i+valuesLength] = v.String() + } return fmt.Sprintf("%s =>\n%s", ss.Metric, strings.Join(vals, "\n")) } -// Value is a generic interface for values resulting from a query evaluation. -type Value interface { - Type() ValueType - String() string +func (ss SampleStream) MarshalJSON() ([]byte, error) { + if len(ss.Histograms) > 0 && len(ss.Values) > 0 { + v := struct { + Metric Metric `json:"metric"` + Values []SamplePair `json:"values"` + Histograms []SampleHistogramPair `json:"histograms"` + }{ + Metric: ss.Metric, + Values: ss.Values, + Histograms: ss.Histograms, + } + return json.Marshal(&v) + } else if len(ss.Histograms) > 0 { + v := struct { + Metric Metric `json:"metric"` + Histograms []SampleHistogramPair `json:"histograms"` + }{ + Metric: ss.Metric, + Histograms: ss.Histograms, + } + return json.Marshal(&v) + } else { + v := struct { + Metric Metric `json:"metric"` + Values []SamplePair `json:"values"` + }{ + Metric: ss.Metric, + Values: ss.Values, + } + return json.Marshal(&v) + } } -func (Matrix) Type() ValueType { return ValMatrix } -func (Vector) Type() ValueType { return ValVector } -func (*Scalar) Type() ValueType { return ValScalar } -func (*String) Type() ValueType { return ValString } - -type ValueType int - -const ( - ValNone ValueType = iota - ValScalar - ValVector - ValMatrix - ValString -) - -// MarshalJSON implements json.Marshaler. -func (et ValueType) MarshalJSON() ([]byte, error) { - return json.Marshal(et.String()) -} +func (ss *SampleStream) UnmarshalJSON(b []byte) error { + v := struct { + Metric Metric `json:"metric"` + Values []SamplePair `json:"values"` + Histograms []SampleHistogramPair `json:"histograms"` + }{ + Metric: ss.Metric, + Values: ss.Values, + Histograms: ss.Histograms, + } -func (et *ValueType) UnmarshalJSON(b []byte) error { - var s string - if err := json.Unmarshal(b, &s); err != nil { + if err := json.Unmarshal(b, &v); err != nil { return err } - switch s { - case "": - *et = ValNone - case "scalar": - *et = ValScalar - case "vector": - *et = ValVector - case "matrix": - *et = ValMatrix - case "string": - *et = ValString - default: - return fmt.Errorf("unknown value type %q", s) - } - return nil -} -func (e ValueType) String() string { - switch e { - case ValNone: - return "" - case ValScalar: - return "scalar" - case ValVector: - return "vector" - case ValMatrix: - return "matrix" - case ValString: - return "string" - } - panic("ValueType.String: unhandled value type") + ss.Metric = v.Metric + ss.Values = v.Values + ss.Histograms = v.Histograms + + return nil } // Scalar is a scalar value evaluated at the set timestamp. diff --git a/vendor/github.com/prometheus/common/model/value_float.go b/vendor/github.com/prometheus/common/model/value_float.go new file mode 100644 index 00000000..0f615a70 --- /dev/null +++ b/vendor/github.com/prometheus/common/model/value_float.go @@ -0,0 +1,100 @@ +// Copyright 2013 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" + "math" + "strconv" +) + +var ( + // ZeroSamplePair is the pseudo zero-value of SamplePair used to signal a + // non-existing sample pair. It is a SamplePair with timestamp Earliest and + // value 0.0. Note that the natural zero value of SamplePair has a timestamp + // of 0, which is possible to appear in a real SamplePair and thus not + // suitable to signal a non-existing SamplePair. + ZeroSamplePair = SamplePair{Timestamp: Earliest} +) + +// A SampleValue is a representation of a value for a given sample at a given +// time. +type SampleValue float64 + +// MarshalJSON implements json.Marshaler. +func (v SampleValue) MarshalJSON() ([]byte, error) { + return json.Marshal(v.String()) +} + +// UnmarshalJSON implements json.Unmarshaler. +func (v *SampleValue) UnmarshalJSON(b []byte) error { + if len(b) < 2 || b[0] != '"' || b[len(b)-1] != '"' { + return fmt.Errorf("sample value must be a quoted string") + } + f, err := strconv.ParseFloat(string(b[1:len(b)-1]), 64) + if err != nil { + return err + } + *v = SampleValue(f) + return nil +} + +// Equal returns true if the value of v and o is equal or if both are NaN. Note +// that v==o is false if both are NaN. If you want the conventional float +// behavior, use == to compare two SampleValues. +func (v SampleValue) Equal(o SampleValue) bool { + if v == o { + return true + } + return math.IsNaN(float64(v)) && math.IsNaN(float64(o)) +} + +func (v SampleValue) String() string { + return strconv.FormatFloat(float64(v), 'f', -1, 64) +} + +// SamplePair pairs a SampleValue with a Timestamp. +type SamplePair struct { + Timestamp Time + Value SampleValue +} + +func (s SamplePair) MarshalJSON() ([]byte, error) { + t, err := json.Marshal(s.Timestamp) + if err != nil { + return nil, err + } + v, err := json.Marshal(s.Value) + if err != nil { + return nil, err + } + return []byte(fmt.Sprintf("[%s,%s]", t, v)), nil +} + +// UnmarshalJSON implements json.Unmarshaler. +func (s *SamplePair) UnmarshalJSON(b []byte) error { + v := [...]json.Unmarshaler{&s.Timestamp, &s.Value} + return json.Unmarshal(b, &v) +} + +// Equal returns true if this SamplePair and o have equal Values and equal +// Timestamps. The semantics of Value equality is defined by SampleValue.Equal. +func (s *SamplePair) Equal(o *SamplePair) bool { + return s == o || (s.Value.Equal(o.Value) && s.Timestamp.Equal(o.Timestamp)) +} + +func (s SamplePair) String() string { + return fmt.Sprintf("%s @[%s]", s.Value, s.Timestamp) +} diff --git a/vendor/github.com/prometheus/common/model/value_histogram.go b/vendor/github.com/prometheus/common/model/value_histogram.go new file mode 100644 index 00000000..54bb038c --- /dev/null +++ b/vendor/github.com/prometheus/common/model/value_histogram.go @@ -0,0 +1,178 @@ +// Copyright 2013 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" + "strconv" + "strings" +) + +type FloatString float64 + +func (v FloatString) String() string { + return strconv.FormatFloat(float64(v), 'f', -1, 64) +} + +func (v FloatString) MarshalJSON() ([]byte, error) { + return json.Marshal(v.String()) +} + +func (v *FloatString) UnmarshalJSON(b []byte) error { + if len(b) < 2 || b[0] != '"' || b[len(b)-1] != '"' { + return fmt.Errorf("float value must be a quoted string") + } + f, err := strconv.ParseFloat(string(b[1:len(b)-1]), 64) + if err != nil { + return err + } + *v = FloatString(f) + return nil +} + +type HistogramBucket struct { + Boundaries int32 + Lower FloatString + Upper FloatString + Count FloatString +} + +func (s HistogramBucket) MarshalJSON() ([]byte, error) { + b, err := json.Marshal(s.Boundaries) + if err != nil { + return nil, err + } + l, err := json.Marshal(s.Lower) + if err != nil { + return nil, err + } + u, err := json.Marshal(s.Upper) + if err != nil { + return nil, err + } + c, err := json.Marshal(s.Count) + if err != nil { + return nil, err + } + return []byte(fmt.Sprintf("[%s,%s,%s,%s]", b, l, u, c)), nil +} + +func (s *HistogramBucket) UnmarshalJSON(buf []byte) error { + tmp := []interface{}{&s.Boundaries, &s.Lower, &s.Upper, &s.Count} + wantLen := len(tmp) + if err := json.Unmarshal(buf, &tmp); err != nil { + return err + } + if gotLen := len(tmp); gotLen != wantLen { + return fmt.Errorf("wrong number of fields: %d != %d", gotLen, wantLen) + } + return nil +} + +func (s *HistogramBucket) Equal(o *HistogramBucket) bool { + return s == o || (s.Boundaries == o.Boundaries && s.Lower == o.Lower && s.Upper == o.Upper && s.Count == o.Count) +} + +func (b HistogramBucket) String() string { + var sb strings.Builder + lowerInclusive := b.Boundaries == 1 || b.Boundaries == 3 + upperInclusive := b.Boundaries == 0 || b.Boundaries == 3 + if lowerInclusive { + sb.WriteRune('[') + } else { + sb.WriteRune('(') + } + fmt.Fprintf(&sb, "%g,%g", b.Lower, b.Upper) + if upperInclusive { + sb.WriteRune(']') + } else { + sb.WriteRune(')') + } + fmt.Fprintf(&sb, ":%v", b.Count) + return sb.String() +} + +type HistogramBuckets []*HistogramBucket + +func (s HistogramBuckets) Equal(o HistogramBuckets) bool { + if len(s) != len(o) { + return false + } + + for i, bucket := range s { + if !bucket.Equal(o[i]) { + return false + } + } + return true +} + +type SampleHistogram struct { + Count FloatString `json:"count"` + Sum FloatString `json:"sum"` + Buckets HistogramBuckets `json:"buckets"` +} + +func (s SampleHistogram) String() string { + return fmt.Sprintf("Count: %f, Sum: %f, Buckets: %v", s.Count, s.Sum, s.Buckets) +} + +func (s *SampleHistogram) Equal(o *SampleHistogram) bool { + return s == o || (s.Count == o.Count && s.Sum == o.Sum && s.Buckets.Equal(o.Buckets)) +} + +type SampleHistogramPair struct { + Timestamp Time + // Histogram should never be nil, it's only stored as pointer for efficiency. 
+ Histogram *SampleHistogram +} + +func (s SampleHistogramPair) MarshalJSON() ([]byte, error) { + if s.Histogram == nil { + return nil, fmt.Errorf("histogram is nil") + } + t, err := json.Marshal(s.Timestamp) + if err != nil { + return nil, err + } + v, err := json.Marshal(s.Histogram) + if err != nil { + return nil, err + } + return []byte(fmt.Sprintf("[%s,%s]", t, v)), nil +} + +func (s *SampleHistogramPair) UnmarshalJSON(buf []byte) error { + tmp := []interface{}{&s.Timestamp, &s.Histogram} + wantLen := len(tmp) + if err := json.Unmarshal(buf, &tmp); err != nil { + return err + } + if gotLen := len(tmp); gotLen != wantLen { + return fmt.Errorf("wrong number of fields: %d != %d", gotLen, wantLen) + } + if s.Histogram == nil { + return fmt.Errorf("histogram is null") + } + return nil +} + +func (s SampleHistogramPair) String() string { + return fmt.Sprintf("%s @[%s]", s.Histogram, s.Timestamp) +} + +func (s *SampleHistogramPair) Equal(o *SampleHistogramPair) bool { + return s == o || (s.Histogram.Equal(o.Histogram) && s.Timestamp.Equal(o.Timestamp)) +} diff --git a/vendor/github.com/prometheus/common/model/value_type.go b/vendor/github.com/prometheus/common/model/value_type.go new file mode 100644 index 00000000..726c50ee --- /dev/null +++ b/vendor/github.com/prometheus/common/model/value_type.go @@ -0,0 +1,83 @@ +// Copyright 2013 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// Value is a generic interface for values resulting from a query evaluation. +type Value interface { + Type() ValueType + String() string +} + +func (Matrix) Type() ValueType { return ValMatrix } +func (Vector) Type() ValueType { return ValVector } +func (*Scalar) Type() ValueType { return ValScalar } +func (*String) Type() ValueType { return ValString } + +type ValueType int + +const ( + ValNone ValueType = iota + ValScalar + ValVector + ValMatrix + ValString +) + +// MarshalJSON implements json.Marshaler. 
+func (et ValueType) MarshalJSON() ([]byte, error) { + return json.Marshal(et.String()) +} + +func (et *ValueType) UnmarshalJSON(b []byte) error { + var s string + if err := json.Unmarshal(b, &s); err != nil { + return err + } + switch s { + case "": + *et = ValNone + case "scalar": + *et = ValScalar + case "vector": + *et = ValVector + case "matrix": + *et = ValMatrix + case "string": + *et = ValString + default: + return fmt.Errorf("unknown value type %q", s) + } + return nil +} + +func (e ValueType) String() string { + switch e { + case ValNone: + return "" + case ValScalar: + return "scalar" + case ValVector: + return "vector" + case ValMatrix: + return "matrix" + case ValString: + return "string" + } + panic("ValueType.String: unhandled value type") +} diff --git a/vendor/github.com/prometheus/procfs/Makefile.common b/vendor/github.com/prometheus/procfs/Makefile.common index 6c8e3e21..b111d256 100644 --- a/vendor/github.com/prometheus/procfs/Makefile.common +++ b/vendor/github.com/prometheus/procfs/Makefile.common @@ -55,19 +55,22 @@ ifneq ($(shell which gotestsum),) endif endif -PROMU_VERSION ?= 0.13.0 +PROMU_VERSION ?= 0.14.0 PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_VERSION)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM).tar.gz +SKIP_GOLANGCI_LINT := GOLANGCI_LINT := GOLANGCI_LINT_OPTS ?= -GOLANGCI_LINT_VERSION ?= v1.45.2 +GOLANGCI_LINT_VERSION ?= v1.51.2 # golangci-lint only supports linux, darwin and windows platforms on i386/amd64. # windows isn't included here because of the path separator being different. ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin)) ifeq ($(GOHOSTARCH),$(filter $(GOHOSTARCH),amd64 i386)) # If we're in CI and there is an Actions file, that means the linter # is being run in Actions, so we don't need to run it here. 
- ifeq (,$(CIRCLE_JOB)) + ifneq (,$(SKIP_GOLANGCI_LINT)) + GOLANGCI_LINT := + else ifeq (,$(CIRCLE_JOB)) GOLANGCI_LINT := $(FIRST_GOPATH)/bin/golangci-lint else ifeq (,$(wildcard .github/workflows/golangci-lint.yml)) GOLANGCI_LINT := $(FIRST_GOPATH)/bin/golangci-lint @@ -88,6 +91,8 @@ BUILD_DOCKER_ARCHS = $(addprefix common-docker-,$(DOCKER_ARCHS)) PUBLISH_DOCKER_ARCHS = $(addprefix common-docker-publish-,$(DOCKER_ARCHS)) TAG_DOCKER_ARCHS = $(addprefix common-docker-tag-latest-,$(DOCKER_ARCHS)) +SANITIZED_DOCKER_IMAGE_TAG := $(subst +,-,$(DOCKER_IMAGE_TAG)) + ifeq ($(GOHOSTARCH),amd64) ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux freebsd darwin windows)) # Only supported on amd64 @@ -202,7 +207,7 @@ common-tarball: promu .PHONY: common-docker $(BUILD_DOCKER_ARCHS) common-docker: $(BUILD_DOCKER_ARCHS) $(BUILD_DOCKER_ARCHS): common-docker-%: - docker build -t "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(DOCKER_IMAGE_TAG)" \ + docker build -t "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(SANITIZED_DOCKER_IMAGE_TAG)" \ -f $(DOCKERFILE_PATH) \ --build-arg ARCH="$*" \ --build-arg OS="linux" \ @@ -211,19 +216,19 @@ $(BUILD_DOCKER_ARCHS): common-docker-%: .PHONY: common-docker-publish $(PUBLISH_DOCKER_ARCHS) common-docker-publish: $(PUBLISH_DOCKER_ARCHS) $(PUBLISH_DOCKER_ARCHS): common-docker-publish-%: - docker push "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(DOCKER_IMAGE_TAG)" + docker push "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(SANITIZED_DOCKER_IMAGE_TAG)" DOCKER_MAJOR_VERSION_TAG = $(firstword $(subst ., ,$(shell cat VERSION))) .PHONY: common-docker-tag-latest $(TAG_DOCKER_ARCHS) common-docker-tag-latest: $(TAG_DOCKER_ARCHS) $(TAG_DOCKER_ARCHS): common-docker-tag-latest-%: - docker tag "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(DOCKER_IMAGE_TAG)" "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:latest" - docker tag "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(DOCKER_IMAGE_TAG)" "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:v$(DOCKER_MAJOR_VERSION_TAG)" + docker tag "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(SANITIZED_DOCKER_IMAGE_TAG)" "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:latest" + docker tag "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:$(SANITIZED_DOCKER_IMAGE_TAG)" "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$*:v$(DOCKER_MAJOR_VERSION_TAG)" .PHONY: common-docker-manifest common-docker-manifest: - DOCKER_CLI_EXPERIMENTAL=enabled docker manifest create -a "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_TAG)" $(foreach ARCH,$(DOCKER_ARCHS),$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$(ARCH):$(DOCKER_IMAGE_TAG)) - DOCKER_CLI_EXPERIMENTAL=enabled docker manifest push "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_TAG)" + DOCKER_CLI_EXPERIMENTAL=enabled docker manifest create -a "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME):$(SANITIZED_DOCKER_IMAGE_TAG)" $(foreach ARCH,$(DOCKER_ARCHS),$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME)-linux-$(ARCH):$(SANITIZED_DOCKER_IMAGE_TAG)) + DOCKER_CLI_EXPERIMENTAL=enabled docker manifest push "$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME):$(SANITIZED_DOCKER_IMAGE_TAG)" .PHONY: promu promu: $(PROMU) diff --git a/vendor/github.com/prometheus/procfs/cpuinfo.go b/vendor/github.com/prometheus/procfs/cpuinfo.go index ff6b927d..06968ca2 100644 --- a/vendor/github.com/prometheus/procfs/cpuinfo.go +++ b/vendor/github.com/prometheus/procfs/cpuinfo.go @@ -380,6 +380,42 @@ func parseCPUInfoMips(info []byte) ([]CPUInfo, error) { return cpuinfo, nil } +func parseCPUInfoLoong(info []byte) ([]CPUInfo, error) { + scanner := bufio.NewScanner(bytes.NewReader(info)) 
+ // find the first "processor" line + firstLine := firstNonEmptyLine(scanner) + if !strings.HasPrefix(firstLine, "system type") || !strings.Contains(firstLine, ":") { + return nil, errors.New("invalid cpuinfo file: " + firstLine) + } + field := strings.SplitN(firstLine, ": ", 2) + cpuinfo := []CPUInfo{} + systemType := field[1] + i := 0 + for scanner.Scan() { + line := scanner.Text() + if !strings.Contains(line, ":") { + continue + } + field := strings.SplitN(line, ": ", 2) + switch strings.TrimSpace(field[0]) { + case "processor": + v, err := strconv.ParseUint(field[1], 0, 32) + if err != nil { + return nil, err + } + i = int(v) + cpuinfo = append(cpuinfo, CPUInfo{}) // start of the next processor + cpuinfo[i].Processor = uint(v) + cpuinfo[i].VendorID = systemType + case "CPU Family": + cpuinfo[i].CPUFamily = field[1] + case "Model Name": + cpuinfo[i].ModelName = field[1] + } + } + return cpuinfo, nil +} + func parseCPUInfoPPC(info []byte) ([]CPUInfo, error) { scanner := bufio.NewScanner(bytes.NewReader(info)) diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_loong64.go b/vendor/github.com/prometheus/procfs/cpuinfo_loong64.go new file mode 100644 index 00000000..d88442f0 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/cpuinfo_loong64.go @@ -0,0 +1,19 @@ +// Copyright 2022 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build linux +// +build linux + +package procfs + +var parseCPUInfo = parseCPUInfoLoong diff --git a/vendor/github.com/prometheus/procfs/cpuinfo_others.go b/vendor/github.com/prometheus/procfs/cpuinfo_others.go index ea41bf2c..a6b2b312 100644 --- a/vendor/github.com/prometheus/procfs/cpuinfo_others.go +++ b/vendor/github.com/prometheus/procfs/cpuinfo_others.go @@ -11,8 +11,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-//go:build linux && !386 && !amd64 && !arm && !arm64 && !mips && !mips64 && !mips64le && !mipsle && !ppc64 && !ppc64le && !riscv64 && !s390x -// +build linux,!386,!amd64,!arm,!arm64,!mips,!mips64,!mips64le,!mipsle,!ppc64,!ppc64le,!riscv64,!s390x +//go:build linux && !386 && !amd64 && !arm && !arm64 && !loong64 && !mips && !mips64 && !mips64le && !mipsle && !ppc64 && !ppc64le && !riscv64 && !s390x +// +build linux,!386,!amd64,!arm,!arm64,!loong64,!mips,!mips64,!mips64le,!mipsle,!ppc64,!ppc64le,!riscv64,!s390x package procfs diff --git a/vendor/github.com/prometheus/procfs/doc.go b/vendor/github.com/prometheus/procfs/doc.go index d31a8260..f9d961e4 100644 --- a/vendor/github.com/prometheus/procfs/doc.go +++ b/vendor/github.com/prometheus/procfs/doc.go @@ -16,30 +16,29 @@ // // Example: // -// package main -// -// import ( -// "fmt" -// "log" -// -// "github.com/prometheus/procfs" -// ) -// -// func main() { -// p, err := procfs.Self() -// if err != nil { -// log.Fatalf("could not get process: %s", err) -// } -// -// stat, err := p.Stat() -// if err != nil { -// log.Fatalf("could not get process stat: %s", err) -// } -// -// fmt.Printf("command: %s\n", stat.Comm) -// fmt.Printf("cpu time: %fs\n", stat.CPUTime()) -// fmt.Printf("vsize: %dB\n", stat.VirtualMemory()) -// fmt.Printf("rss: %dB\n", stat.ResidentMemory()) -// } -// +// package main +// +// import ( +// "fmt" +// "log" +// +// "github.com/prometheus/procfs" +// ) +// +// func main() { +// p, err := procfs.Self() +// if err != nil { +// log.Fatalf("could not get process: %s", err) +// } +// +// stat, err := p.Stat() +// if err != nil { +// log.Fatalf("could not get process stat: %s", err) +// } +// +// fmt.Printf("command: %s\n", stat.Comm) +// fmt.Printf("cpu time: %fs\n", stat.CPUTime()) +// fmt.Printf("vsize: %dB\n", stat.VirtualMemory()) +// fmt.Printf("rss: %dB\n", stat.ResidentMemory()) +// } package procfs diff --git a/vendor/github.com/prometheus/procfs/fs.go b/vendor/github.com/prometheus/procfs/fs.go index 0102ab0f..60c551e0 100644 --- a/vendor/github.com/prometheus/procfs/fs.go +++ b/vendor/github.com/prometheus/procfs/fs.go @@ -21,6 +21,7 @@ import ( // kernel data structures. type FS struct { proc fs.FS + real bool } // DefaultMountPoint is the common mount point of the proc filesystem. @@ -39,5 +40,11 @@ func NewFS(mountPoint string) (FS, error) { if err != nil { return FS{}, err } - return FS{fs}, nil + + real, err := isRealProc(mountPoint) + if err != nil { + return FS{}, err + } + + return FS{fs, real}, nil } diff --git a/vendor/github.com/prometheus/procfs/fs_statfs_notype.go b/vendor/github.com/prometheus/procfs/fs_statfs_notype.go new file mode 100644 index 00000000..80057696 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/fs_statfs_notype.go @@ -0,0 +1,23 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build netbsd || openbsd || solaris || windows +// +build netbsd openbsd solaris windows + +package procfs + +// isRealProc returns true on architectures that don't have a Type argument +// in their Statfs_t struct +func isRealProc(mountPoint string) (bool, error) { + return true, nil +} diff --git a/vendor/github.com/prometheus/procfs/fs_statfs_type.go b/vendor/github.com/prometheus/procfs/fs_statfs_type.go new file mode 100644 index 00000000..6233217a --- /dev/null +++ b/vendor/github.com/prometheus/procfs/fs_statfs_type.go @@ -0,0 +1,33 @@ +// Copyright 2018 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build !netbsd && !openbsd && !solaris && !windows +// +build !netbsd,!openbsd,!solaris,!windows + +package procfs + +import ( + "syscall" +) + +// isRealProc determines whether supplied mountpoint is really a proc filesystem. +func isRealProc(mountPoint string) (bool, error) { + stat := syscall.Statfs_t{} + err := syscall.Statfs(mountPoint, &stat) + if err != nil { + return false, err + } + + // 0x9fa0 is PROC_SUPER_MAGIC: https://elixir.bootlin.com/linux/v6.1/source/include/uapi/linux/magic.h#L87 + return stat.Type == 0x9fa0, nil +} diff --git a/vendor/github.com/prometheus/procfs/internal/util/parse.go b/vendor/github.com/prometheus/procfs/internal/util/parse.go index b030951f..14272dc7 100644 --- a/vendor/github.com/prometheus/procfs/internal/util/parse.go +++ b/vendor/github.com/prometheus/procfs/internal/util/parse.go @@ -64,6 +64,21 @@ func ParsePInt64s(ss []string) ([]*int64, error) { return us, nil } +// Parses a uint64 from given hex in string. +func ParseHexUint64s(ss []string) ([]*uint64, error) { + us := make([]*uint64, 0, len(ss)) + for _, s := range ss { + u, err := strconv.ParseUint(s, 16, 64) + if err != nil { + return nil, err + } + + us = append(us, &u) + } + + return us, nil +} + // ReadUintFromFile reads a file and attempts to parse a uint64 from it. func ReadUintFromFile(path string) (uint64, error) { data, err := os.ReadFile(path) diff --git a/vendor/github.com/prometheus/procfs/mountstats.go b/vendor/github.com/prometheus/procfs/mountstats.go index f7a828bb..7f68890c 100644 --- a/vendor/github.com/prometheus/procfs/mountstats.go +++ b/vendor/github.com/prometheus/procfs/mountstats.go @@ -186,6 +186,8 @@ type NFSOperationStats struct { CumulativeTotalResponseMilliseconds uint64 // Duration from when a request was enqueued to when it was completely handled. CumulativeTotalRequestMilliseconds uint64 + // The average time from the point the client sends RPC requests until it receives the response. + AverageRTTMilliseconds float64 // The count of operations that complete with tk_status < 0. These statuses usually indicate error conditions. 
Errors uint64 } @@ -284,7 +286,8 @@ func parseMountStats(r io.Reader) ([]*Mount, error) { } // parseMount parses an entry in /proc/[pid]/mountstats in the format: -// device [device] mounted on [mount] with fstype [type] +// +// device [device] mounted on [mount] with fstype [type] func parseMount(ss []string) (*Mount, error) { if len(ss) < deviceEntryLen { return nil, fmt.Errorf("invalid device entry: %v", ss) @@ -533,7 +536,6 @@ func parseNFSOperationStats(s *bufio.Scanner) ([]NFSOperationStats, error) { ns = append(ns, n) } - opStats := NFSOperationStats{ Operation: strings.TrimSuffix(ss[0], ":"), Requests: ns[0], @@ -545,6 +547,9 @@ func parseNFSOperationStats(s *bufio.Scanner) ([]NFSOperationStats, error) { CumulativeTotalResponseMilliseconds: ns[6], CumulativeTotalRequestMilliseconds: ns[7], } + if ns[0] != 0 { + opStats.AverageRTTMilliseconds = float64(ns[6]) / float64(ns[0]) + } if len(ns) > 8 { opStats.Errors = ns[8] diff --git a/vendor/github.com/prometheus/procfs/net_conntrackstat.go b/vendor/github.com/prometheus/procfs/net_conntrackstat.go index 8300daca..64a0e946 100644 --- a/vendor/github.com/prometheus/procfs/net_conntrackstat.go +++ b/vendor/github.com/prometheus/procfs/net_conntrackstat.go @@ -18,7 +18,6 @@ import ( "bytes" "fmt" "io" - "strconv" "strings" "github.com/prometheus/procfs/internal/util" @@ -28,9 +27,13 @@ import ( // and contains netfilter conntrack statistics at one CPU core. type ConntrackStatEntry struct { Entries uint64 + Searched uint64 Found uint64 + New uint64 Invalid uint64 Ignore uint64 + Delete uint64 + DeleteList uint64 Insert uint64 InsertFailed uint64 Drop uint64 @@ -81,73 +84,34 @@ func parseConntrackStat(r io.Reader) ([]ConntrackStatEntry, error) { // Parses a ConntrackStatEntry from given array of fields. 
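Aside on the conntrack rewrite below: the entry parser now converts every whitespace-separated hex column at once and maps fields by position, accepting 16 or 17 columns (search_restart is absent on kernels older than 2.6.35). A standalone sketch of the same idea using only the standard library; the sample line and its values are illustrative, not taken from the patch:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	// One per-CPU row from /proc/net/stat/nf_conntrack (hex columns).
	line := "000000a5 00000000 00000012 00000000 0000004a 0000003b 00000000 00000000 000000c8 00000001 00000000 00000000 00000000 00000000 00000000 00000000 00000003"
	fields := strings.Fields(line)
	vals := make([]uint64, len(fields))
	for i, f := range fields {
		v, err := strconv.ParseUint(f, 16, 64)
		if err != nil {
			panic(err)
		}
		vals[i] = v
	}
	// Column positions as used by the vendored parser.
	fmt.Println("entries:      ", vals[0])
	fmt.Println("found:        ", vals[2])
	fmt.Println("invalid:      ", vals[4])
	fmt.Println("insert_failed:", vals[9])
	if len(vals) == 17 { // search_restart only exists on Linux >= 2.6.35
		fmt.Println("search_restart:", vals[16])
	}
}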
func parseConntrackStatEntry(fields []string) (*ConntrackStatEntry, error) { - if len(fields) != 17 { - return nil, fmt.Errorf("invalid conntrackstat entry, missing fields") - } - entry := &ConntrackStatEntry{} - - entries, err := parseConntrackStatField(fields[0]) - if err != nil { - return nil, err - } - entry.Entries = entries - - found, err := parseConntrackStatField(fields[2]) - if err != nil { - return nil, err - } - entry.Found = found - - invalid, err := parseConntrackStatField(fields[4]) - if err != nil { - return nil, err - } - entry.Invalid = invalid - - ignore, err := parseConntrackStatField(fields[5]) - if err != nil { - return nil, err - } - entry.Ignore = ignore - - insert, err := parseConntrackStatField(fields[8]) + entries, err := util.ParseHexUint64s(fields) if err != nil { - return nil, err + return nil, fmt.Errorf("invalid conntrackstat entry, couldn't parse fields: %s", err) } - entry.Insert = insert - - insertFailed, err := parseConntrackStatField(fields[9]) - if err != nil { - return nil, err + numEntries := len(entries) + if numEntries < 16 || numEntries > 17 { + return nil, fmt.Errorf("invalid conntrackstat entry, invalid number of fields: %d", numEntries) } - entry.InsertFailed = insertFailed - drop, err := parseConntrackStatField(fields[10]) - if err != nil { - return nil, err + stats := &ConntrackStatEntry{ + Entries: *entries[0], + Searched: *entries[1], + Found: *entries[2], + New: *entries[3], + Invalid: *entries[4], + Ignore: *entries[5], + Delete: *entries[6], + DeleteList: *entries[7], + Insert: *entries[8], + InsertFailed: *entries[9], + Drop: *entries[10], + EarlyDrop: *entries[11], } - entry.Drop = drop - earlyDrop, err := parseConntrackStatField(fields[11]) - if err != nil { - return nil, err + // Ignore missing search_restart on Linux < 2.6.35. + if numEntries == 17 { + stats.SearchRestart = *entries[16] } - entry.EarlyDrop = earlyDrop - searchRestart, err := parseConntrackStatField(fields[16]) - if err != nil { - return nil, err - } - entry.SearchRestart = searchRestart - - return entry, nil -} - -// Parses a uint64 from given hex in string. -func parseConntrackStatField(field string) (uint64, error) { - val, err := strconv.ParseUint(field, 16, 64) - if err != nil { - return 0, fmt.Errorf("couldn't parse %q field: %w", field, err) - } - return val, err + return stats, nil } diff --git a/vendor/github.com/prometheus/procfs/net_softnet.go b/vendor/github.com/prometheus/procfs/net_softnet.go index a94f86dc..540cea52 100644 --- a/vendor/github.com/prometheus/procfs/net_softnet.go +++ b/vendor/github.com/prometheus/procfs/net_softnet.go @@ -27,8 +27,9 @@ import ( // For the proc file format details, // See: // * Linux 2.6.23 https://elixir.bootlin.com/linux/v2.6.23/source/net/core/dev.c#L2343 -// * Linux 4.17 https://elixir.bootlin.com/linux/v4.17/source/net/core/net-procfs.c#L162 -// and https://elixir.bootlin.com/linux/v4.17/source/include/linux/netdevice.h#L2810. +// * Linux 2.6.39 https://elixir.bootlin.com/linux/v2.6.39/source/net/core/dev.c#L4086 +// * Linux 4.18 https://elixir.bootlin.com/linux/v4.18/source/net/core/net-procfs.c#L162 +// * Linux 5.14 https://elixir.bootlin.com/linux/v5.14/source/net/core/net-procfs.c#L169 // SoftnetStat contains a single row of data from /proc/net/softnet_stat. type SoftnetStat struct { @@ -38,6 +39,18 @@ type SoftnetStat struct { Dropped uint32 // Number of times processing packets ran out of quota. TimeSqueezed uint32 + // Number of collision occur while obtaining device lock while transmitting. 
+ CPUCollision uint32 + // Number of times cpu woken up received_rps. + ReceivedRps uint32 + // number of times flow limit has been reached. + FlowLimitCount uint32 + // Softnet backlog status. + SoftnetBacklogLen uint32 + // CPU id owning this softnet_data. + Index uint32 + // softnet_data's Width. + Width int } var softNetProcFile = "net/softnet_stat" @@ -63,25 +76,65 @@ func parseSoftnet(r io.Reader) ([]SoftnetStat, error) { s := bufio.NewScanner(r) var stats []SoftnetStat + cpuIndex := 0 for s.Scan() { columns := strings.Fields(s.Text()) width := len(columns) + softnetStat := SoftnetStat{} if width < minColumns { return nil, fmt.Errorf("%d columns were detected, but at least %d were expected", width, minColumns) } - // We only parse the first three columns at the moment. - us, err := parseHexUint32s(columns[0:3]) - if err != nil { - return nil, err + // Linux 2.6.23 https://elixir.bootlin.com/linux/v2.6.23/source/net/core/dev.c#L2347 + if width >= minColumns { + us, err := parseHexUint32s(columns[0:9]) + if err != nil { + return nil, err + } + + softnetStat.Processed = us[0] + softnetStat.Dropped = us[1] + softnetStat.TimeSqueezed = us[2] + softnetStat.CPUCollision = us[8] + } + + // Linux 2.6.39 https://elixir.bootlin.com/linux/v2.6.39/source/net/core/dev.c#L4086 + if width >= 10 { + us, err := parseHexUint32s(columns[9:10]) + if err != nil { + return nil, err + } + + softnetStat.ReceivedRps = us[0] } - stats = append(stats, SoftnetStat{ - Processed: us[0], - Dropped: us[1], - TimeSqueezed: us[2], - }) + // Linux 4.18 https://elixir.bootlin.com/linux/v4.18/source/net/core/net-procfs.c#L162 + if width >= 11 { + us, err := parseHexUint32s(columns[10:11]) + if err != nil { + return nil, err + } + + softnetStat.FlowLimitCount = us[0] + } + + // Linux 5.14 https://elixir.bootlin.com/linux/v5.14/source/net/core/net-procfs.c#L169 + if width >= 13 { + us, err := parseHexUint32s(columns[11:13]) + if err != nil { + return nil, err + } + + softnetStat.SoftnetBacklogLen = us[0] + softnetStat.Index = us[1] + } else { + // For older kernels, create the Index based on the scan line number. + softnetStat.Index = uint32(cpuIndex) + } + softnetStat.Width = width + stats = append(stats, softnetStat) + cpuIndex++ } return stats, nil diff --git a/vendor/github.com/prometheus/procfs/net_wireless.go b/vendor/github.com/prometheus/procfs/net_wireless.go new file mode 100644 index 00000000..c80fb154 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/net_wireless.go @@ -0,0 +1,182 @@ +// Copyright 2023 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package procfs + +import ( + "bufio" + "bytes" + "fmt" + "io" + "strconv" + "strings" + + "github.com/prometheus/procfs/internal/util" +) + +// Wireless models the content of /proc/net/wireless. +type Wireless struct { + Name string + + // Status is the current 4-digit hex value status of the interface. + Status uint64 + + // QualityLink is the link quality. + QualityLink int + + // QualityLevel is the signal gain (dBm). 
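Aside on the new wireless support: the Wireless accessor defined further below in this file parses /proc/net/wireless into one struct per interface. A short consumer-side sketch (not part of the vendored diff), with error handling reduced to panics for brevity:

package main

import (
	"fmt"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		panic(err)
	}
	ifaces, err := fs.Wireless()
	if err != nil {
		panic(err) // e.g. no /proc/net/wireless on this machine
	}
	for _, w := range ifaces {
		fmt.Printf("%s: link=%d level=%ddBm noise=%ddBm retry-drops=%d\n",
			w.Name, w.QualityLink, w.QualityLevel, w.QualityNoise, w.DiscardedRetry)
	}
}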
+ QualityLevel int + + // QualityNoise is the signal noise baseline (dBm). + QualityNoise int + + // DiscardedNwid is the number of discarded packets with wrong nwid/essid. + DiscardedNwid int + + // DiscardedCrypt is the number of discarded packets with wrong code/decode (WEP). + DiscardedCrypt int + + // DiscardedFrag is the number of discarded packets that can't perform MAC reassembly. + DiscardedFrag int + + // DiscardedRetry is the number of discarded packets that reached max MAC retries. + DiscardedRetry int + + // DiscardedMisc is the number of discarded packets for other reasons. + DiscardedMisc int + + // MissedBeacon is the number of missed beacons/superframe. + MissedBeacon int +} + +// Wireless returns kernel wireless statistics. +func (fs FS) Wireless() ([]*Wireless, error) { + b, err := util.ReadFileNoStat(fs.proc.Path("net/wireless")) + if err != nil { + return nil, err + } + + m, err := parseWireless(bytes.NewReader(b)) + if err != nil { + return nil, fmt.Errorf("failed to parse wireless: %w", err) + } + + return m, nil +} + +// parseWireless parses the contents of /proc/net/wireless. +/* +Inter-| sta-| Quality | Discarded packets | Missed | WE +face | tus | link level noise | nwid crypt frag retry misc | beacon | 22 + eth1: 0000 5. -256. -10. 0 1 0 3 0 0 + eth2: 0000 5. -256. -20. 0 2 0 4 0 0 +*/ +func parseWireless(r io.Reader) ([]*Wireless, error) { + var ( + interfaces []*Wireless + scanner = bufio.NewScanner(r) + ) + + for n := 0; scanner.Scan(); n++ { + // Skip the 2 header lines. + if n < 2 { + continue + } + + line := scanner.Text() + + parts := strings.Split(line, ":") + if len(parts) != 2 { + return nil, fmt.Errorf("expected 2 parts after splitting line by ':', got %d for line %q", len(parts), line) + } + + name := strings.TrimSpace(parts[0]) + stats := strings.Fields(parts[1]) + + if len(stats) < 10 { + return nil, fmt.Errorf("invalid number of fields in line %d, expected at least 10, got %d: %q", n, len(stats), line) + } + + status, err := strconv.ParseUint(stats[0], 16, 16) + if err != nil { + return nil, fmt.Errorf("invalid status in line %d: %q", n, line) + } + + qlink, err := strconv.Atoi(strings.TrimSuffix(stats[1], ".")) + if err != nil { + return nil, fmt.Errorf("failed to parse Quality:link as integer %q: %w", qlink, err) + } + + qlevel, err := strconv.Atoi(strings.TrimSuffix(stats[2], ".")) + if err != nil { + return nil, fmt.Errorf("failed to parse Quality:level as integer %q: %w", qlevel, err) + } + + qnoise, err := strconv.Atoi(strings.TrimSuffix(stats[3], ".")) + if err != nil { + return nil, fmt.Errorf("failed to parse Quality:noise as integer %q: %w", qnoise, err) + } + + dnwid, err := strconv.Atoi(stats[4]) + if err != nil { + return nil, fmt.Errorf("failed to parse Discarded:nwid as integer %q: %w", dnwid, err) + } + + dcrypt, err := strconv.Atoi(stats[5]) + if err != nil { + return nil, fmt.Errorf("failed to parse Discarded:crypt as integer %q: %w", dcrypt, err) + } + + dfrag, err := strconv.Atoi(stats[6]) + if err != nil { + return nil, fmt.Errorf("failed to parse Discarded:frag as integer %q: %w", dfrag, err) + } + + dretry, err := strconv.Atoi(stats[7]) + if err != nil { + return nil, fmt.Errorf("failed to parse Discarded:retry as integer %q: %w", dretry, err) + } + + dmisc, err := strconv.Atoi(stats[8]) + if err != nil { + return nil, fmt.Errorf("failed to parse Discarded:misc as integer %q: %w", dmisc, err) + } + + mbeacon, err := strconv.Atoi(stats[9]) + if err != nil { + return nil, fmt.Errorf("failed to parse Missed:beacon as integer 
%q: %w", mbeacon, err) + } + + w := &Wireless{ + Name: name, + Status: status, + QualityLink: qlink, + QualityLevel: qlevel, + QualityNoise: qnoise, + DiscardedNwid: dnwid, + DiscardedCrypt: dcrypt, + DiscardedFrag: dfrag, + DiscardedRetry: dretry, + DiscardedMisc: dmisc, + MissedBeacon: mbeacon, + } + + interfaces = append(interfaces, w) + } + + if err := scanner.Err(); err != nil { + return nil, fmt.Errorf("failed to scan /proc/net/wireless: %w", err) + } + + return interfaces, nil +} diff --git a/vendor/github.com/prometheus/procfs/netstat.go b/vendor/github.com/prometheus/procfs/netstat.go index dcea9c5a..742dff45 100644 --- a/vendor/github.com/prometheus/procfs/netstat.go +++ b/vendor/github.com/prometheus/procfs/netstat.go @@ -37,32 +37,46 @@ func (fs FS) NetStat() ([]NetStat, error) { var netStatsTotal []NetStat for _, filePath := range statFiles { - file, err := os.Open(filePath) + procNetstat, err := parseNetstat(filePath) if err != nil { return nil, err } + procNetstat.Filename = filepath.Base(filePath) - netStatFile := NetStat{ - Filename: filepath.Base(filePath), - Stats: make(map[string][]uint64), - } - scanner := bufio.NewScanner(file) - scanner.Scan() - // First string is always a header for stats - var headers []string - headers = append(headers, strings.Fields(scanner.Text())...) + netStatsTotal = append(netStatsTotal, procNetstat) + } + return netStatsTotal, nil +} + +// parseNetstat parses the metrics from `/proc/net/stat/` file +// and returns a NetStat structure. +func parseNetstat(filePath string) (NetStat, error) { + netStat := NetStat{ + Stats: make(map[string][]uint64), + } + file, err := os.Open(filePath) + if err != nil { + return netStat, err + } + defer file.Close() + + scanner := bufio.NewScanner(file) + scanner.Scan() - // Other strings represent per-CPU counters - for scanner.Scan() { - for num, counter := range strings.Fields(scanner.Text()) { - value, err := strconv.ParseUint(counter, 16, 64) - if err != nil { - return nil, err - } - netStatFile.Stats[headers[num]] = append(netStatFile.Stats[headers[num]], value) + // First string is always a header for stats + var headers []string + headers = append(headers, strings.Fields(scanner.Text())...) + + // Other strings represent per-CPU counters + for scanner.Scan() { + for num, counter := range strings.Fields(scanner.Text()) { + value, err := strconv.ParseUint(counter, 16, 64) + if err != nil { + return NetStat{}, err } + netStat.Stats[headers[num]] = append(netStat.Stats[headers[num]], value) } - netStatsTotal = append(netStatsTotal, netStatFile) } - return netStatsTotal, nil + + return netStat, nil } diff --git a/vendor/github.com/prometheus/procfs/proc.go b/vendor/github.com/prometheus/procfs/proc.go index c30223af..48f39daf 100644 --- a/vendor/github.com/prometheus/procfs/proc.go +++ b/vendor/github.com/prometheus/procfs/proc.go @@ -21,7 +21,6 @@ import ( "strconv" "strings" - "github.com/prometheus/procfs/internal/fs" "github.com/prometheus/procfs/internal/util" ) @@ -30,7 +29,7 @@ type Proc struct { // The process ID. PID int - fs fs.FS + fs FS } // Procs represents a list of Proc structs. @@ -92,7 +91,7 @@ func (fs FS) Proc(pid int) (Proc, error) { if _, err := os.Stat(fs.proc.Path(strconv.Itoa(pid))); err != nil { return Proc{}, err } - return Proc{PID: pid, fs: fs.proc}, nil + return Proc{PID: pid, fs: fs}, nil } // AllProcs returns a list of all currently available processes. 
@@ -114,7 +113,7 @@ func (fs FS) AllProcs() (Procs, error) { if err != nil { continue } - p = append(p, Proc{PID: int(pid), fs: fs.proc}) + p = append(p, Proc{PID: int(pid), fs: fs}) } return p, nil @@ -237,6 +236,19 @@ func (p Proc) FileDescriptorTargets() ([]string, error) { // FileDescriptorsLen returns the number of currently open file descriptors of // a process. func (p Proc) FileDescriptorsLen() (int, error) { + // Use fast path if available (Linux v6.2): https://github.com/torvalds/linux/commit/f1f1f2569901 + if p.fs.real { + stat, err := os.Stat(p.path("fd")) + if err != nil { + return 0, err + } + + size := stat.Size() + if size > 0 { + return int(size), nil + } + } + fds, err := p.fileDescriptors() if err != nil { return 0, err @@ -285,7 +297,7 @@ func (p Proc) fileDescriptors() ([]string, error) { } func (p Proc) path(pa ...string) string { - return p.fs.Path(append([]string{strconv.Itoa(p.PID)}, pa...)...) + return p.fs.proc.Path(append([]string{strconv.Itoa(p.PID)}, pa...)...) } // FileDescriptorsInfo retrieves information about all file descriptors of diff --git a/vendor/github.com/prometheus/procfs/proc_cgroup.go b/vendor/github.com/prometheus/procfs/proc_cgroup.go index cca03327..ea83a75f 100644 --- a/vendor/github.com/prometheus/procfs/proc_cgroup.go +++ b/vendor/github.com/prometheus/procfs/proc_cgroup.go @@ -23,7 +23,7 @@ import ( "github.com/prometheus/procfs/internal/util" ) -// Cgroup models one line from /proc/[pid]/cgroup. Each Cgroup struct describes the the placement of a PID inside a +// Cgroup models one line from /proc/[pid]/cgroup. Each Cgroup struct describes the placement of a PID inside a // specific control hierarchy. The kernel has two cgroup APIs, v1 and v2. v1 has one hierarchy per available resource // controller, while v2 has one unified hierarchy shared by all controllers. Regardless of v1 or v2, all hierarchies // contain all running processes, so the question answerable with a Cgroup struct is 'where is this process in diff --git a/vendor/github.com/prometheus/procfs/proc_interrupts.go b/vendor/github.com/prometheus/procfs/proc_interrupts.go new file mode 100644 index 00000000..9df79c23 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/proc_interrupts.go @@ -0,0 +1,98 @@ +// Copyright 2022 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package procfs + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "strconv" + "strings" + + "github.com/prometheus/procfs/internal/util" +) + +// Interrupt represents a single interrupt line. +type Interrupt struct { + // Info is the type of interrupt. + Info string + // Devices is the name of the device that is located at that IRQ + Devices string + // Values is the number of interrupts per CPU. + Values []string +} + +// Interrupts models the content of /proc/interrupts. Key is the IRQ number. 
+// - https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/deployment_guide/s2-proc-interrupts +// - https://raspberrypi.stackexchange.com/questions/105802/explanation-of-proc-interrupts-output +type Interrupts map[string]Interrupt + +// Interrupts creates a new instance from a given Proc instance. +func (p Proc) Interrupts() (Interrupts, error) { + data, err := util.ReadFileNoStat(p.path("interrupts")) + if err != nil { + return nil, err + } + return parseInterrupts(bytes.NewReader(data)) +} + +func parseInterrupts(r io.Reader) (Interrupts, error) { + var ( + interrupts = Interrupts{} + scanner = bufio.NewScanner(r) + ) + + if !scanner.Scan() { + return nil, errors.New("interrupts empty") + } + cpuNum := len(strings.Fields(scanner.Text())) // one header per cpu + + for scanner.Scan() { + parts := strings.Fields(scanner.Text()) + if len(parts) == 0 { // skip empty lines + continue + } + if len(parts) < 2 { + return nil, fmt.Errorf("not enough fields in interrupts (expected at least 2 fields but got %d): %s", len(parts), parts) + } + intName := parts[0][:len(parts[0])-1] // remove trailing : + + if len(parts) == 2 { + interrupts[intName] = Interrupt{ + Info: "", + Devices: "", + Values: []string{ + parts[1], + }, + } + continue + } + + intr := Interrupt{ + Values: parts[1 : cpuNum+1], + } + + if _, err := strconv.Atoi(intName); err == nil { // numeral interrupt + intr.Info = parts[cpuNum+1] + intr.Devices = strings.Join(parts[cpuNum+2:], " ") + } else { + intr.Info = strings.Join(parts[cpuNum+1:], " ") + } + interrupts[intName] = intr + } + + return interrupts, scanner.Err() +} diff --git a/vendor/github.com/prometheus/procfs/proc_netstat.go b/vendor/github.com/prometheus/procfs/proc_netstat.go index 48b52381..6a43bb24 100644 --- a/vendor/github.com/prometheus/procfs/proc_netstat.go +++ b/vendor/github.com/prometheus/procfs/proc_netstat.go @@ -33,139 +33,140 @@ type ProcNetstat struct { } type TcpExt struct { // nolint:revive - SyncookiesSent float64 - SyncookiesRecv float64 - SyncookiesFailed float64 - EmbryonicRsts float64 - PruneCalled float64 - RcvPruned float64 - OfoPruned float64 - OutOfWindowIcmps float64 - LockDroppedIcmps float64 - ArpFilter float64 - TW float64 - TWRecycled float64 - TWKilled float64 - PAWSActive float64 - PAWSEstab float64 - DelayedACKs float64 - DelayedACKLocked float64 - DelayedACKLost float64 - ListenOverflows float64 - ListenDrops float64 - TCPHPHits float64 - TCPPureAcks float64 - TCPHPAcks float64 - TCPRenoRecovery float64 - TCPSackRecovery float64 - TCPSACKReneging float64 - TCPSACKReorder float64 - TCPRenoReorder float64 - TCPTSReorder float64 - TCPFullUndo float64 - TCPPartialUndo float64 - TCPDSACKUndo float64 - TCPLossUndo float64 - TCPLostRetransmit float64 - TCPRenoFailures float64 - TCPSackFailures float64 - TCPLossFailures float64 - TCPFastRetrans float64 - TCPSlowStartRetrans float64 - TCPTimeouts float64 - TCPLossProbes float64 - TCPLossProbeRecovery float64 - TCPRenoRecoveryFail float64 - TCPSackRecoveryFail float64 - TCPRcvCollapsed float64 - TCPDSACKOldSent float64 - TCPDSACKOfoSent float64 - TCPDSACKRecv float64 - TCPDSACKOfoRecv float64 - TCPAbortOnData float64 - TCPAbortOnClose float64 - TCPAbortOnMemory float64 - TCPAbortOnTimeout float64 - TCPAbortOnLinger float64 - TCPAbortFailed float64 - TCPMemoryPressures float64 - TCPMemoryPressuresChrono float64 - TCPSACKDiscard float64 - TCPDSACKIgnoredOld float64 - TCPDSACKIgnoredNoUndo float64 - TCPSpuriousRTOs float64 - TCPMD5NotFound float64 - TCPMD5Unexpected float64 - 
TCPMD5Failure float64 - TCPSackShifted float64 - TCPSackMerged float64 - TCPSackShiftFallback float64 - TCPBacklogDrop float64 - PFMemallocDrop float64 - TCPMinTTLDrop float64 - TCPDeferAcceptDrop float64 - IPReversePathFilter float64 - TCPTimeWaitOverflow float64 - TCPReqQFullDoCookies float64 - TCPReqQFullDrop float64 - TCPRetransFail float64 - TCPRcvCoalesce float64 - TCPOFOQueue float64 - TCPOFODrop float64 - TCPOFOMerge float64 - TCPChallengeACK float64 - TCPSYNChallenge float64 - TCPFastOpenActive float64 - TCPFastOpenActiveFail float64 - TCPFastOpenPassive float64 - TCPFastOpenPassiveFail float64 - TCPFastOpenListenOverflow float64 - TCPFastOpenCookieReqd float64 - TCPFastOpenBlackhole float64 - TCPSpuriousRtxHostQueues float64 - BusyPollRxPackets float64 - TCPAutoCorking float64 - TCPFromZeroWindowAdv float64 - TCPToZeroWindowAdv float64 - TCPWantZeroWindowAdv float64 - TCPSynRetrans float64 - TCPOrigDataSent float64 - TCPHystartTrainDetect float64 - TCPHystartTrainCwnd float64 - TCPHystartDelayDetect float64 - TCPHystartDelayCwnd float64 - TCPACKSkippedSynRecv float64 - TCPACKSkippedPAWS float64 - TCPACKSkippedSeq float64 - TCPACKSkippedFinWait2 float64 - TCPACKSkippedTimeWait float64 - TCPACKSkippedChallenge float64 - TCPWinProbe float64 - TCPKeepAlive float64 - TCPMTUPFail float64 - TCPMTUPSuccess float64 - TCPWqueueTooBig float64 + SyncookiesSent *float64 + SyncookiesRecv *float64 + SyncookiesFailed *float64 + EmbryonicRsts *float64 + PruneCalled *float64 + RcvPruned *float64 + OfoPruned *float64 + OutOfWindowIcmps *float64 + LockDroppedIcmps *float64 + ArpFilter *float64 + TW *float64 + TWRecycled *float64 + TWKilled *float64 + PAWSActive *float64 + PAWSEstab *float64 + DelayedACKs *float64 + DelayedACKLocked *float64 + DelayedACKLost *float64 + ListenOverflows *float64 + ListenDrops *float64 + TCPHPHits *float64 + TCPPureAcks *float64 + TCPHPAcks *float64 + TCPRenoRecovery *float64 + TCPSackRecovery *float64 + TCPSACKReneging *float64 + TCPSACKReorder *float64 + TCPRenoReorder *float64 + TCPTSReorder *float64 + TCPFullUndo *float64 + TCPPartialUndo *float64 + TCPDSACKUndo *float64 + TCPLossUndo *float64 + TCPLostRetransmit *float64 + TCPRenoFailures *float64 + TCPSackFailures *float64 + TCPLossFailures *float64 + TCPFastRetrans *float64 + TCPSlowStartRetrans *float64 + TCPTimeouts *float64 + TCPLossProbes *float64 + TCPLossProbeRecovery *float64 + TCPRenoRecoveryFail *float64 + TCPSackRecoveryFail *float64 + TCPRcvCollapsed *float64 + TCPDSACKOldSent *float64 + TCPDSACKOfoSent *float64 + TCPDSACKRecv *float64 + TCPDSACKOfoRecv *float64 + TCPAbortOnData *float64 + TCPAbortOnClose *float64 + TCPAbortOnMemory *float64 + TCPAbortOnTimeout *float64 + TCPAbortOnLinger *float64 + TCPAbortFailed *float64 + TCPMemoryPressures *float64 + TCPMemoryPressuresChrono *float64 + TCPSACKDiscard *float64 + TCPDSACKIgnoredOld *float64 + TCPDSACKIgnoredNoUndo *float64 + TCPSpuriousRTOs *float64 + TCPMD5NotFound *float64 + TCPMD5Unexpected *float64 + TCPMD5Failure *float64 + TCPSackShifted *float64 + TCPSackMerged *float64 + TCPSackShiftFallback *float64 + TCPBacklogDrop *float64 + PFMemallocDrop *float64 + TCPMinTTLDrop *float64 + TCPDeferAcceptDrop *float64 + IPReversePathFilter *float64 + TCPTimeWaitOverflow *float64 + TCPReqQFullDoCookies *float64 + TCPReqQFullDrop *float64 + TCPRetransFail *float64 + TCPRcvCoalesce *float64 + TCPRcvQDrop *float64 + TCPOFOQueue *float64 + TCPOFODrop *float64 + TCPOFOMerge *float64 + TCPChallengeACK *float64 + TCPSYNChallenge *float64 + TCPFastOpenActive 
*float64 + TCPFastOpenActiveFail *float64 + TCPFastOpenPassive *float64 + TCPFastOpenPassiveFail *float64 + TCPFastOpenListenOverflow *float64 + TCPFastOpenCookieReqd *float64 + TCPFastOpenBlackhole *float64 + TCPSpuriousRtxHostQueues *float64 + BusyPollRxPackets *float64 + TCPAutoCorking *float64 + TCPFromZeroWindowAdv *float64 + TCPToZeroWindowAdv *float64 + TCPWantZeroWindowAdv *float64 + TCPSynRetrans *float64 + TCPOrigDataSent *float64 + TCPHystartTrainDetect *float64 + TCPHystartTrainCwnd *float64 + TCPHystartDelayDetect *float64 + TCPHystartDelayCwnd *float64 + TCPACKSkippedSynRecv *float64 + TCPACKSkippedPAWS *float64 + TCPACKSkippedSeq *float64 + TCPACKSkippedFinWait2 *float64 + TCPACKSkippedTimeWait *float64 + TCPACKSkippedChallenge *float64 + TCPWinProbe *float64 + TCPKeepAlive *float64 + TCPMTUPFail *float64 + TCPMTUPSuccess *float64 + TCPWqueueTooBig *float64 } type IpExt struct { // nolint:revive - InNoRoutes float64 - InTruncatedPkts float64 - InMcastPkts float64 - OutMcastPkts float64 - InBcastPkts float64 - OutBcastPkts float64 - InOctets float64 - OutOctets float64 - InMcastOctets float64 - OutMcastOctets float64 - InBcastOctets float64 - OutBcastOctets float64 - InCsumErrors float64 - InNoECTPkts float64 - InECT1Pkts float64 - InECT0Pkts float64 - InCEPkts float64 - ReasmOverlaps float64 + InNoRoutes *float64 + InTruncatedPkts *float64 + InMcastPkts *float64 + OutMcastPkts *float64 + InBcastPkts *float64 + OutBcastPkts *float64 + InOctets *float64 + OutOctets *float64 + InMcastOctets *float64 + OutMcastOctets *float64 + InBcastOctets *float64 + OutBcastOctets *float64 + InCsumErrors *float64 + InNoECTPkts *float64 + InECT1Pkts *float64 + InECT0Pkts *float64 + InCEPkts *float64 + ReasmOverlaps *float64 } func (p Proc) Netstat() (ProcNetstat, error) { @@ -174,14 +175,14 @@ func (p Proc) Netstat() (ProcNetstat, error) { if err != nil { return ProcNetstat{PID: p.PID}, err } - procNetstat, err := parseNetstat(bytes.NewReader(data), filename) + procNetstat, err := parseProcNetstat(bytes.NewReader(data), filename) procNetstat.PID = p.PID return procNetstat, err } -// parseNetstat parses the metrics from proc//net/netstat file +// parseProcNetstat parses the metrics from proc//net/netstat file // and returns a ProcNetstat structure. 
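Aside on the TcpExt/IpExt changes above: every counter is now a *float64, so keys that a given kernel does not expose stay nil instead of silently reading as 0, and callers need a nil check. A brief sketch under that assumption, using the Netstat accessor shown above:

package main

import (
	"fmt"
	"os"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		panic(err)
	}
	p, err := fs.Proc(os.Getpid())
	if err != nil {
		panic(err)
	}
	ns, err := p.Netstat()
	if err != nil {
		panic(err)
	}
	// Counters missing from this kernel's /proc/<pid>/net/netstat remain nil.
	if v := ns.TcpExt.TCPTimeouts; v != nil {
		fmt.Println("TCPTimeouts:", *v)
	} else {
		fmt.Println("TCPTimeouts not reported by this kernel")
	}
}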
-func parseNetstat(r io.Reader, fileName string) (ProcNetstat, error) { +func parseProcNetstat(r io.Reader, fileName string) (ProcNetstat, error) { var ( scanner = bufio.NewScanner(r) procNetstat = ProcNetstat{} @@ -208,230 +209,232 @@ func parseNetstat(r io.Reader, fileName string) (ProcNetstat, error) { case "TcpExt": switch key { case "SyncookiesSent": - procNetstat.TcpExt.SyncookiesSent = value + procNetstat.TcpExt.SyncookiesSent = &value case "SyncookiesRecv": - procNetstat.TcpExt.SyncookiesRecv = value + procNetstat.TcpExt.SyncookiesRecv = &value case "SyncookiesFailed": - procNetstat.TcpExt.SyncookiesFailed = value + procNetstat.TcpExt.SyncookiesFailed = &value case "EmbryonicRsts": - procNetstat.TcpExt.EmbryonicRsts = value + procNetstat.TcpExt.EmbryonicRsts = &value case "PruneCalled": - procNetstat.TcpExt.PruneCalled = value + procNetstat.TcpExt.PruneCalled = &value case "RcvPruned": - procNetstat.TcpExt.RcvPruned = value + procNetstat.TcpExt.RcvPruned = &value case "OfoPruned": - procNetstat.TcpExt.OfoPruned = value + procNetstat.TcpExt.OfoPruned = &value case "OutOfWindowIcmps": - procNetstat.TcpExt.OutOfWindowIcmps = value + procNetstat.TcpExt.OutOfWindowIcmps = &value case "LockDroppedIcmps": - procNetstat.TcpExt.LockDroppedIcmps = value + procNetstat.TcpExt.LockDroppedIcmps = &value case "ArpFilter": - procNetstat.TcpExt.ArpFilter = value + procNetstat.TcpExt.ArpFilter = &value case "TW": - procNetstat.TcpExt.TW = value + procNetstat.TcpExt.TW = &value case "TWRecycled": - procNetstat.TcpExt.TWRecycled = value + procNetstat.TcpExt.TWRecycled = &value case "TWKilled": - procNetstat.TcpExt.TWKilled = value + procNetstat.TcpExt.TWKilled = &value case "PAWSActive": - procNetstat.TcpExt.PAWSActive = value + procNetstat.TcpExt.PAWSActive = &value case "PAWSEstab": - procNetstat.TcpExt.PAWSEstab = value + procNetstat.TcpExt.PAWSEstab = &value case "DelayedACKs": - procNetstat.TcpExt.DelayedACKs = value + procNetstat.TcpExt.DelayedACKs = &value case "DelayedACKLocked": - procNetstat.TcpExt.DelayedACKLocked = value + procNetstat.TcpExt.DelayedACKLocked = &value case "DelayedACKLost": - procNetstat.TcpExt.DelayedACKLost = value + procNetstat.TcpExt.DelayedACKLost = &value case "ListenOverflows": - procNetstat.TcpExt.ListenOverflows = value + procNetstat.TcpExt.ListenOverflows = &value case "ListenDrops": - procNetstat.TcpExt.ListenDrops = value + procNetstat.TcpExt.ListenDrops = &value case "TCPHPHits": - procNetstat.TcpExt.TCPHPHits = value + procNetstat.TcpExt.TCPHPHits = &value case "TCPPureAcks": - procNetstat.TcpExt.TCPPureAcks = value + procNetstat.TcpExt.TCPPureAcks = &value case "TCPHPAcks": - procNetstat.TcpExt.TCPHPAcks = value + procNetstat.TcpExt.TCPHPAcks = &value case "TCPRenoRecovery": - procNetstat.TcpExt.TCPRenoRecovery = value + procNetstat.TcpExt.TCPRenoRecovery = &value case "TCPSackRecovery": - procNetstat.TcpExt.TCPSackRecovery = value + procNetstat.TcpExt.TCPSackRecovery = &value case "TCPSACKReneging": - procNetstat.TcpExt.TCPSACKReneging = value + procNetstat.TcpExt.TCPSACKReneging = &value case "TCPSACKReorder": - procNetstat.TcpExt.TCPSACKReorder = value + procNetstat.TcpExt.TCPSACKReorder = &value case "TCPRenoReorder": - procNetstat.TcpExt.TCPRenoReorder = value + procNetstat.TcpExt.TCPRenoReorder = &value case "TCPTSReorder": - procNetstat.TcpExt.TCPTSReorder = value + procNetstat.TcpExt.TCPTSReorder = &value case "TCPFullUndo": - procNetstat.TcpExt.TCPFullUndo = value + procNetstat.TcpExt.TCPFullUndo = &value case "TCPPartialUndo": - 
procNetstat.TcpExt.TCPPartialUndo = value + procNetstat.TcpExt.TCPPartialUndo = &value case "TCPDSACKUndo": - procNetstat.TcpExt.TCPDSACKUndo = value + procNetstat.TcpExt.TCPDSACKUndo = &value case "TCPLossUndo": - procNetstat.TcpExt.TCPLossUndo = value + procNetstat.TcpExt.TCPLossUndo = &value case "TCPLostRetransmit": - procNetstat.TcpExt.TCPLostRetransmit = value + procNetstat.TcpExt.TCPLostRetransmit = &value case "TCPRenoFailures": - procNetstat.TcpExt.TCPRenoFailures = value + procNetstat.TcpExt.TCPRenoFailures = &value case "TCPSackFailures": - procNetstat.TcpExt.TCPSackFailures = value + procNetstat.TcpExt.TCPSackFailures = &value case "TCPLossFailures": - procNetstat.TcpExt.TCPLossFailures = value + procNetstat.TcpExt.TCPLossFailures = &value case "TCPFastRetrans": - procNetstat.TcpExt.TCPFastRetrans = value + procNetstat.TcpExt.TCPFastRetrans = &value case "TCPSlowStartRetrans": - procNetstat.TcpExt.TCPSlowStartRetrans = value + procNetstat.TcpExt.TCPSlowStartRetrans = &value case "TCPTimeouts": - procNetstat.TcpExt.TCPTimeouts = value + procNetstat.TcpExt.TCPTimeouts = &value case "TCPLossProbes": - procNetstat.TcpExt.TCPLossProbes = value + procNetstat.TcpExt.TCPLossProbes = &value case "TCPLossProbeRecovery": - procNetstat.TcpExt.TCPLossProbeRecovery = value + procNetstat.TcpExt.TCPLossProbeRecovery = &value case "TCPRenoRecoveryFail": - procNetstat.TcpExt.TCPRenoRecoveryFail = value + procNetstat.TcpExt.TCPRenoRecoveryFail = &value case "TCPSackRecoveryFail": - procNetstat.TcpExt.TCPSackRecoveryFail = value + procNetstat.TcpExt.TCPSackRecoveryFail = &value case "TCPRcvCollapsed": - procNetstat.TcpExt.TCPRcvCollapsed = value + procNetstat.TcpExt.TCPRcvCollapsed = &value case "TCPDSACKOldSent": - procNetstat.TcpExt.TCPDSACKOldSent = value + procNetstat.TcpExt.TCPDSACKOldSent = &value case "TCPDSACKOfoSent": - procNetstat.TcpExt.TCPDSACKOfoSent = value + procNetstat.TcpExt.TCPDSACKOfoSent = &value case "TCPDSACKRecv": - procNetstat.TcpExt.TCPDSACKRecv = value + procNetstat.TcpExt.TCPDSACKRecv = &value case "TCPDSACKOfoRecv": - procNetstat.TcpExt.TCPDSACKOfoRecv = value + procNetstat.TcpExt.TCPDSACKOfoRecv = &value case "TCPAbortOnData": - procNetstat.TcpExt.TCPAbortOnData = value + procNetstat.TcpExt.TCPAbortOnData = &value case "TCPAbortOnClose": - procNetstat.TcpExt.TCPAbortOnClose = value + procNetstat.TcpExt.TCPAbortOnClose = &value case "TCPDeferAcceptDrop": - procNetstat.TcpExt.TCPDeferAcceptDrop = value + procNetstat.TcpExt.TCPDeferAcceptDrop = &value case "IPReversePathFilter": - procNetstat.TcpExt.IPReversePathFilter = value + procNetstat.TcpExt.IPReversePathFilter = &value case "TCPTimeWaitOverflow": - procNetstat.TcpExt.TCPTimeWaitOverflow = value + procNetstat.TcpExt.TCPTimeWaitOverflow = &value case "TCPReqQFullDoCookies": - procNetstat.TcpExt.TCPReqQFullDoCookies = value + procNetstat.TcpExt.TCPReqQFullDoCookies = &value case "TCPReqQFullDrop": - procNetstat.TcpExt.TCPReqQFullDrop = value + procNetstat.TcpExt.TCPReqQFullDrop = &value case "TCPRetransFail": - procNetstat.TcpExt.TCPRetransFail = value + procNetstat.TcpExt.TCPRetransFail = &value case "TCPRcvCoalesce": - procNetstat.TcpExt.TCPRcvCoalesce = value + procNetstat.TcpExt.TCPRcvCoalesce = &value + case "TCPRcvQDrop": + procNetstat.TcpExt.TCPRcvQDrop = &value case "TCPOFOQueue": - procNetstat.TcpExt.TCPOFOQueue = value + procNetstat.TcpExt.TCPOFOQueue = &value case "TCPOFODrop": - procNetstat.TcpExt.TCPOFODrop = value + procNetstat.TcpExt.TCPOFODrop = &value case "TCPOFOMerge": - 
procNetstat.TcpExt.TCPOFOMerge = value + procNetstat.TcpExt.TCPOFOMerge = &value case "TCPChallengeACK": - procNetstat.TcpExt.TCPChallengeACK = value + procNetstat.TcpExt.TCPChallengeACK = &value case "TCPSYNChallenge": - procNetstat.TcpExt.TCPSYNChallenge = value + procNetstat.TcpExt.TCPSYNChallenge = &value case "TCPFastOpenActive": - procNetstat.TcpExt.TCPFastOpenActive = value + procNetstat.TcpExt.TCPFastOpenActive = &value case "TCPFastOpenActiveFail": - procNetstat.TcpExt.TCPFastOpenActiveFail = value + procNetstat.TcpExt.TCPFastOpenActiveFail = &value case "TCPFastOpenPassive": - procNetstat.TcpExt.TCPFastOpenPassive = value + procNetstat.TcpExt.TCPFastOpenPassive = &value case "TCPFastOpenPassiveFail": - procNetstat.TcpExt.TCPFastOpenPassiveFail = value + procNetstat.TcpExt.TCPFastOpenPassiveFail = &value case "TCPFastOpenListenOverflow": - procNetstat.TcpExt.TCPFastOpenListenOverflow = value + procNetstat.TcpExt.TCPFastOpenListenOverflow = &value case "TCPFastOpenCookieReqd": - procNetstat.TcpExt.TCPFastOpenCookieReqd = value + procNetstat.TcpExt.TCPFastOpenCookieReqd = &value case "TCPFastOpenBlackhole": - procNetstat.TcpExt.TCPFastOpenBlackhole = value + procNetstat.TcpExt.TCPFastOpenBlackhole = &value case "TCPSpuriousRtxHostQueues": - procNetstat.TcpExt.TCPSpuriousRtxHostQueues = value + procNetstat.TcpExt.TCPSpuriousRtxHostQueues = &value case "BusyPollRxPackets": - procNetstat.TcpExt.BusyPollRxPackets = value + procNetstat.TcpExt.BusyPollRxPackets = &value case "TCPAutoCorking": - procNetstat.TcpExt.TCPAutoCorking = value + procNetstat.TcpExt.TCPAutoCorking = &value case "TCPFromZeroWindowAdv": - procNetstat.TcpExt.TCPFromZeroWindowAdv = value + procNetstat.TcpExt.TCPFromZeroWindowAdv = &value case "TCPToZeroWindowAdv": - procNetstat.TcpExt.TCPToZeroWindowAdv = value + procNetstat.TcpExt.TCPToZeroWindowAdv = &value case "TCPWantZeroWindowAdv": - procNetstat.TcpExt.TCPWantZeroWindowAdv = value + procNetstat.TcpExt.TCPWantZeroWindowAdv = &value case "TCPSynRetrans": - procNetstat.TcpExt.TCPSynRetrans = value + procNetstat.TcpExt.TCPSynRetrans = &value case "TCPOrigDataSent": - procNetstat.TcpExt.TCPOrigDataSent = value + procNetstat.TcpExt.TCPOrigDataSent = &value case "TCPHystartTrainDetect": - procNetstat.TcpExt.TCPHystartTrainDetect = value + procNetstat.TcpExt.TCPHystartTrainDetect = &value case "TCPHystartTrainCwnd": - procNetstat.TcpExt.TCPHystartTrainCwnd = value + procNetstat.TcpExt.TCPHystartTrainCwnd = &value case "TCPHystartDelayDetect": - procNetstat.TcpExt.TCPHystartDelayDetect = value + procNetstat.TcpExt.TCPHystartDelayDetect = &value case "TCPHystartDelayCwnd": - procNetstat.TcpExt.TCPHystartDelayCwnd = value + procNetstat.TcpExt.TCPHystartDelayCwnd = &value case "TCPACKSkippedSynRecv": - procNetstat.TcpExt.TCPACKSkippedSynRecv = value + procNetstat.TcpExt.TCPACKSkippedSynRecv = &value case "TCPACKSkippedPAWS": - procNetstat.TcpExt.TCPACKSkippedPAWS = value + procNetstat.TcpExt.TCPACKSkippedPAWS = &value case "TCPACKSkippedSeq": - procNetstat.TcpExt.TCPACKSkippedSeq = value + procNetstat.TcpExt.TCPACKSkippedSeq = &value case "TCPACKSkippedFinWait2": - procNetstat.TcpExt.TCPACKSkippedFinWait2 = value + procNetstat.TcpExt.TCPACKSkippedFinWait2 = &value case "TCPACKSkippedTimeWait": - procNetstat.TcpExt.TCPACKSkippedTimeWait = value + procNetstat.TcpExt.TCPACKSkippedTimeWait = &value case "TCPACKSkippedChallenge": - procNetstat.TcpExt.TCPACKSkippedChallenge = value + procNetstat.TcpExt.TCPACKSkippedChallenge = &value case "TCPWinProbe": - 
procNetstat.TcpExt.TCPWinProbe = value + procNetstat.TcpExt.TCPWinProbe = &value case "TCPKeepAlive": - procNetstat.TcpExt.TCPKeepAlive = value + procNetstat.TcpExt.TCPKeepAlive = &value case "TCPMTUPFail": - procNetstat.TcpExt.TCPMTUPFail = value + procNetstat.TcpExt.TCPMTUPFail = &value case "TCPMTUPSuccess": - procNetstat.TcpExt.TCPMTUPSuccess = value + procNetstat.TcpExt.TCPMTUPSuccess = &value case "TCPWqueueTooBig": - procNetstat.TcpExt.TCPWqueueTooBig = value + procNetstat.TcpExt.TCPWqueueTooBig = &value } case "IpExt": switch key { case "InNoRoutes": - procNetstat.IpExt.InNoRoutes = value + procNetstat.IpExt.InNoRoutes = &value case "InTruncatedPkts": - procNetstat.IpExt.InTruncatedPkts = value + procNetstat.IpExt.InTruncatedPkts = &value case "InMcastPkts": - procNetstat.IpExt.InMcastPkts = value + procNetstat.IpExt.InMcastPkts = &value case "OutMcastPkts": - procNetstat.IpExt.OutMcastPkts = value + procNetstat.IpExt.OutMcastPkts = &value case "InBcastPkts": - procNetstat.IpExt.InBcastPkts = value + procNetstat.IpExt.InBcastPkts = &value case "OutBcastPkts": - procNetstat.IpExt.OutBcastPkts = value + procNetstat.IpExt.OutBcastPkts = &value case "InOctets": - procNetstat.IpExt.InOctets = value + procNetstat.IpExt.InOctets = &value case "OutOctets": - procNetstat.IpExt.OutOctets = value + procNetstat.IpExt.OutOctets = &value case "InMcastOctets": - procNetstat.IpExt.InMcastOctets = value + procNetstat.IpExt.InMcastOctets = &value case "OutMcastOctets": - procNetstat.IpExt.OutMcastOctets = value + procNetstat.IpExt.OutMcastOctets = &value case "InBcastOctets": - procNetstat.IpExt.InBcastOctets = value + procNetstat.IpExt.InBcastOctets = &value case "OutBcastOctets": - procNetstat.IpExt.OutBcastOctets = value + procNetstat.IpExt.OutBcastOctets = &value case "InCsumErrors": - procNetstat.IpExt.InCsumErrors = value + procNetstat.IpExt.InCsumErrors = &value case "InNoECTPkts": - procNetstat.IpExt.InNoECTPkts = value + procNetstat.IpExt.InNoECTPkts = &value case "InECT1Pkts": - procNetstat.IpExt.InECT1Pkts = value + procNetstat.IpExt.InECT1Pkts = &value case "InECT0Pkts": - procNetstat.IpExt.InECT0Pkts = value + procNetstat.IpExt.InECT0Pkts = &value case "InCEPkts": - procNetstat.IpExt.InCEPkts = value + procNetstat.IpExt.InCEPkts = &value case "ReasmOverlaps": - procNetstat.IpExt.ReasmOverlaps = value + procNetstat.IpExt.ReasmOverlaps = &value } } } diff --git a/vendor/github.com/prometheus/procfs/proc_snmp.go b/vendor/github.com/prometheus/procfs/proc_snmp.go index ae191896..6c46b718 100644 --- a/vendor/github.com/prometheus/procfs/proc_snmp.go +++ b/vendor/github.com/prometheus/procfs/proc_snmp.go @@ -37,100 +37,100 @@ type ProcSnmp struct { } type Ip struct { // nolint:revive - Forwarding float64 - DefaultTTL float64 - InReceives float64 - InHdrErrors float64 - InAddrErrors float64 - ForwDatagrams float64 - InUnknownProtos float64 - InDiscards float64 - InDelivers float64 - OutRequests float64 - OutDiscards float64 - OutNoRoutes float64 - ReasmTimeout float64 - ReasmReqds float64 - ReasmOKs float64 - ReasmFails float64 - FragOKs float64 - FragFails float64 - FragCreates float64 + Forwarding *float64 + DefaultTTL *float64 + InReceives *float64 + InHdrErrors *float64 + InAddrErrors *float64 + ForwDatagrams *float64 + InUnknownProtos *float64 + InDiscards *float64 + InDelivers *float64 + OutRequests *float64 + OutDiscards *float64 + OutNoRoutes *float64 + ReasmTimeout *float64 + ReasmReqds *float64 + ReasmOKs *float64 + ReasmFails *float64 + FragOKs *float64 + FragFails *float64 + 
FragCreates *float64 } -type Icmp struct { - InMsgs float64 - InErrors float64 - InCsumErrors float64 - InDestUnreachs float64 - InTimeExcds float64 - InParmProbs float64 - InSrcQuenchs float64 - InRedirects float64 - InEchos float64 - InEchoReps float64 - InTimestamps float64 - InTimestampReps float64 - InAddrMasks float64 - InAddrMaskReps float64 - OutMsgs float64 - OutErrors float64 - OutDestUnreachs float64 - OutTimeExcds float64 - OutParmProbs float64 - OutSrcQuenchs float64 - OutRedirects float64 - OutEchos float64 - OutEchoReps float64 - OutTimestamps float64 - OutTimestampReps float64 - OutAddrMasks float64 - OutAddrMaskReps float64 +type Icmp struct { // nolint:revive + InMsgs *float64 + InErrors *float64 + InCsumErrors *float64 + InDestUnreachs *float64 + InTimeExcds *float64 + InParmProbs *float64 + InSrcQuenchs *float64 + InRedirects *float64 + InEchos *float64 + InEchoReps *float64 + InTimestamps *float64 + InTimestampReps *float64 + InAddrMasks *float64 + InAddrMaskReps *float64 + OutMsgs *float64 + OutErrors *float64 + OutDestUnreachs *float64 + OutTimeExcds *float64 + OutParmProbs *float64 + OutSrcQuenchs *float64 + OutRedirects *float64 + OutEchos *float64 + OutEchoReps *float64 + OutTimestamps *float64 + OutTimestampReps *float64 + OutAddrMasks *float64 + OutAddrMaskReps *float64 } type IcmpMsg struct { - InType3 float64 - OutType3 float64 + InType3 *float64 + OutType3 *float64 } type Tcp struct { // nolint:revive - RtoAlgorithm float64 - RtoMin float64 - RtoMax float64 - MaxConn float64 - ActiveOpens float64 - PassiveOpens float64 - AttemptFails float64 - EstabResets float64 - CurrEstab float64 - InSegs float64 - OutSegs float64 - RetransSegs float64 - InErrs float64 - OutRsts float64 - InCsumErrors float64 + RtoAlgorithm *float64 + RtoMin *float64 + RtoMax *float64 + MaxConn *float64 + ActiveOpens *float64 + PassiveOpens *float64 + AttemptFails *float64 + EstabResets *float64 + CurrEstab *float64 + InSegs *float64 + OutSegs *float64 + RetransSegs *float64 + InErrs *float64 + OutRsts *float64 + InCsumErrors *float64 } type Udp struct { // nolint:revive - InDatagrams float64 - NoPorts float64 - InErrors float64 - OutDatagrams float64 - RcvbufErrors float64 - SndbufErrors float64 - InCsumErrors float64 - IgnoredMulti float64 + InDatagrams *float64 + NoPorts *float64 + InErrors *float64 + OutDatagrams *float64 + RcvbufErrors *float64 + SndbufErrors *float64 + InCsumErrors *float64 + IgnoredMulti *float64 } type UdpLite struct { // nolint:revive - InDatagrams float64 - NoPorts float64 - InErrors float64 - OutDatagrams float64 - RcvbufErrors float64 - SndbufErrors float64 - InCsumErrors float64 - IgnoredMulti float64 + InDatagrams *float64 + NoPorts *float64 + InErrors *float64 + OutDatagrams *float64 + RcvbufErrors *float64 + SndbufErrors *float64 + InCsumErrors *float64 + IgnoredMulti *float64 } func (p Proc) Snmp() (ProcSnmp, error) { @@ -173,178 +173,178 @@ func parseSnmp(r io.Reader, fileName string) (ProcSnmp, error) { case "Ip": switch key { case "Forwarding": - procSnmp.Ip.Forwarding = value + procSnmp.Ip.Forwarding = &value case "DefaultTTL": - procSnmp.Ip.DefaultTTL = value + procSnmp.Ip.DefaultTTL = &value case "InReceives": - procSnmp.Ip.InReceives = value + procSnmp.Ip.InReceives = &value case "InHdrErrors": - procSnmp.Ip.InHdrErrors = value + procSnmp.Ip.InHdrErrors = &value case "InAddrErrors": - procSnmp.Ip.InAddrErrors = value + procSnmp.Ip.InAddrErrors = &value case "ForwDatagrams": - procSnmp.Ip.ForwDatagrams = value + procSnmp.Ip.ForwDatagrams = &value 
case "InUnknownProtos": - procSnmp.Ip.InUnknownProtos = value + procSnmp.Ip.InUnknownProtos = &value case "InDiscards": - procSnmp.Ip.InDiscards = value + procSnmp.Ip.InDiscards = &value case "InDelivers": - procSnmp.Ip.InDelivers = value + procSnmp.Ip.InDelivers = &value case "OutRequests": - procSnmp.Ip.OutRequests = value + procSnmp.Ip.OutRequests = &value case "OutDiscards": - procSnmp.Ip.OutDiscards = value + procSnmp.Ip.OutDiscards = &value case "OutNoRoutes": - procSnmp.Ip.OutNoRoutes = value + procSnmp.Ip.OutNoRoutes = &value case "ReasmTimeout": - procSnmp.Ip.ReasmTimeout = value + procSnmp.Ip.ReasmTimeout = &value case "ReasmReqds": - procSnmp.Ip.ReasmReqds = value + procSnmp.Ip.ReasmReqds = &value case "ReasmOKs": - procSnmp.Ip.ReasmOKs = value + procSnmp.Ip.ReasmOKs = &value case "ReasmFails": - procSnmp.Ip.ReasmFails = value + procSnmp.Ip.ReasmFails = &value case "FragOKs": - procSnmp.Ip.FragOKs = value + procSnmp.Ip.FragOKs = &value case "FragFails": - procSnmp.Ip.FragFails = value + procSnmp.Ip.FragFails = &value case "FragCreates": - procSnmp.Ip.FragCreates = value + procSnmp.Ip.FragCreates = &value } case "Icmp": switch key { case "InMsgs": - procSnmp.Icmp.InMsgs = value + procSnmp.Icmp.InMsgs = &value case "InErrors": - procSnmp.Icmp.InErrors = value + procSnmp.Icmp.InErrors = &value case "InCsumErrors": - procSnmp.Icmp.InCsumErrors = value + procSnmp.Icmp.InCsumErrors = &value case "InDestUnreachs": - procSnmp.Icmp.InDestUnreachs = value + procSnmp.Icmp.InDestUnreachs = &value case "InTimeExcds": - procSnmp.Icmp.InTimeExcds = value + procSnmp.Icmp.InTimeExcds = &value case "InParmProbs": - procSnmp.Icmp.InParmProbs = value + procSnmp.Icmp.InParmProbs = &value case "InSrcQuenchs": - procSnmp.Icmp.InSrcQuenchs = value + procSnmp.Icmp.InSrcQuenchs = &value case "InRedirects": - procSnmp.Icmp.InRedirects = value + procSnmp.Icmp.InRedirects = &value case "InEchos": - procSnmp.Icmp.InEchos = value + procSnmp.Icmp.InEchos = &value case "InEchoReps": - procSnmp.Icmp.InEchoReps = value + procSnmp.Icmp.InEchoReps = &value case "InTimestamps": - procSnmp.Icmp.InTimestamps = value + procSnmp.Icmp.InTimestamps = &value case "InTimestampReps": - procSnmp.Icmp.InTimestampReps = value + procSnmp.Icmp.InTimestampReps = &value case "InAddrMasks": - procSnmp.Icmp.InAddrMasks = value + procSnmp.Icmp.InAddrMasks = &value case "InAddrMaskReps": - procSnmp.Icmp.InAddrMaskReps = value + procSnmp.Icmp.InAddrMaskReps = &value case "OutMsgs": - procSnmp.Icmp.OutMsgs = value + procSnmp.Icmp.OutMsgs = &value case "OutErrors": - procSnmp.Icmp.OutErrors = value + procSnmp.Icmp.OutErrors = &value case "OutDestUnreachs": - procSnmp.Icmp.OutDestUnreachs = value + procSnmp.Icmp.OutDestUnreachs = &value case "OutTimeExcds": - procSnmp.Icmp.OutTimeExcds = value + procSnmp.Icmp.OutTimeExcds = &value case "OutParmProbs": - procSnmp.Icmp.OutParmProbs = value + procSnmp.Icmp.OutParmProbs = &value case "OutSrcQuenchs": - procSnmp.Icmp.OutSrcQuenchs = value + procSnmp.Icmp.OutSrcQuenchs = &value case "OutRedirects": - procSnmp.Icmp.OutRedirects = value + procSnmp.Icmp.OutRedirects = &value case "OutEchos": - procSnmp.Icmp.OutEchos = value + procSnmp.Icmp.OutEchos = &value case "OutEchoReps": - procSnmp.Icmp.OutEchoReps = value + procSnmp.Icmp.OutEchoReps = &value case "OutTimestamps": - procSnmp.Icmp.OutTimestamps = value + procSnmp.Icmp.OutTimestamps = &value case "OutTimestampReps": - procSnmp.Icmp.OutTimestampReps = value + procSnmp.Icmp.OutTimestampReps = &value case "OutAddrMasks": - 
procSnmp.Icmp.OutAddrMasks = value + procSnmp.Icmp.OutAddrMasks = &value case "OutAddrMaskReps": - procSnmp.Icmp.OutAddrMaskReps = value + procSnmp.Icmp.OutAddrMaskReps = &value } case "IcmpMsg": switch key { case "InType3": - procSnmp.IcmpMsg.InType3 = value + procSnmp.IcmpMsg.InType3 = &value case "OutType3": - procSnmp.IcmpMsg.OutType3 = value + procSnmp.IcmpMsg.OutType3 = &value } case "Tcp": switch key { case "RtoAlgorithm": - procSnmp.Tcp.RtoAlgorithm = value + procSnmp.Tcp.RtoAlgorithm = &value case "RtoMin": - procSnmp.Tcp.RtoMin = value + procSnmp.Tcp.RtoMin = &value case "RtoMax": - procSnmp.Tcp.RtoMax = value + procSnmp.Tcp.RtoMax = &value case "MaxConn": - procSnmp.Tcp.MaxConn = value + procSnmp.Tcp.MaxConn = &value case "ActiveOpens": - procSnmp.Tcp.ActiveOpens = value + procSnmp.Tcp.ActiveOpens = &value case "PassiveOpens": - procSnmp.Tcp.PassiveOpens = value + procSnmp.Tcp.PassiveOpens = &value case "AttemptFails": - procSnmp.Tcp.AttemptFails = value + procSnmp.Tcp.AttemptFails = &value case "EstabResets": - procSnmp.Tcp.EstabResets = value + procSnmp.Tcp.EstabResets = &value case "CurrEstab": - procSnmp.Tcp.CurrEstab = value + procSnmp.Tcp.CurrEstab = &value case "InSegs": - procSnmp.Tcp.InSegs = value + procSnmp.Tcp.InSegs = &value case "OutSegs": - procSnmp.Tcp.OutSegs = value + procSnmp.Tcp.OutSegs = &value case "RetransSegs": - procSnmp.Tcp.RetransSegs = value + procSnmp.Tcp.RetransSegs = &value case "InErrs": - procSnmp.Tcp.InErrs = value + procSnmp.Tcp.InErrs = &value case "OutRsts": - procSnmp.Tcp.OutRsts = value + procSnmp.Tcp.OutRsts = &value case "InCsumErrors": - procSnmp.Tcp.InCsumErrors = value + procSnmp.Tcp.InCsumErrors = &value } case "Udp": switch key { case "InDatagrams": - procSnmp.Udp.InDatagrams = value + procSnmp.Udp.InDatagrams = &value case "NoPorts": - procSnmp.Udp.NoPorts = value + procSnmp.Udp.NoPorts = &value case "InErrors": - procSnmp.Udp.InErrors = value + procSnmp.Udp.InErrors = &value case "OutDatagrams": - procSnmp.Udp.OutDatagrams = value + procSnmp.Udp.OutDatagrams = &value case "RcvbufErrors": - procSnmp.Udp.RcvbufErrors = value + procSnmp.Udp.RcvbufErrors = &value case "SndbufErrors": - procSnmp.Udp.SndbufErrors = value + procSnmp.Udp.SndbufErrors = &value case "InCsumErrors": - procSnmp.Udp.InCsumErrors = value + procSnmp.Udp.InCsumErrors = &value case "IgnoredMulti": - procSnmp.Udp.IgnoredMulti = value + procSnmp.Udp.IgnoredMulti = &value } case "UdpLite": switch key { case "InDatagrams": - procSnmp.UdpLite.InDatagrams = value + procSnmp.UdpLite.InDatagrams = &value case "NoPorts": - procSnmp.UdpLite.NoPorts = value + procSnmp.UdpLite.NoPorts = &value case "InErrors": - procSnmp.UdpLite.InErrors = value + procSnmp.UdpLite.InErrors = &value case "OutDatagrams": - procSnmp.UdpLite.OutDatagrams = value + procSnmp.UdpLite.OutDatagrams = &value case "RcvbufErrors": - procSnmp.UdpLite.RcvbufErrors = value + procSnmp.UdpLite.RcvbufErrors = &value case "SndbufErrors": - procSnmp.UdpLite.SndbufErrors = value + procSnmp.UdpLite.SndbufErrors = &value case "InCsumErrors": - procSnmp.UdpLite.InCsumErrors = value + procSnmp.UdpLite.InCsumErrors = &value case "IgnoredMulti": - procSnmp.UdpLite.IgnoredMulti = value + procSnmp.UdpLite.IgnoredMulti = &value } } } diff --git a/vendor/github.com/prometheus/procfs/proc_snmp6.go b/vendor/github.com/prometheus/procfs/proc_snmp6.go index f611992d..3059cc6a 100644 --- a/vendor/github.com/prometheus/procfs/proc_snmp6.go +++ b/vendor/github.com/prometheus/procfs/proc_snmp6.go @@ -36,106 +36,106 @@ type 
ProcSnmp6 struct { } type Ip6 struct { // nolint:revive - InReceives float64 - InHdrErrors float64 - InTooBigErrors float64 - InNoRoutes float64 - InAddrErrors float64 - InUnknownProtos float64 - InTruncatedPkts float64 - InDiscards float64 - InDelivers float64 - OutForwDatagrams float64 - OutRequests float64 - OutDiscards float64 - OutNoRoutes float64 - ReasmTimeout float64 - ReasmReqds float64 - ReasmOKs float64 - ReasmFails float64 - FragOKs float64 - FragFails float64 - FragCreates float64 - InMcastPkts float64 - OutMcastPkts float64 - InOctets float64 - OutOctets float64 - InMcastOctets float64 - OutMcastOctets float64 - InBcastOctets float64 - OutBcastOctets float64 - InNoECTPkts float64 - InECT1Pkts float64 - InECT0Pkts float64 - InCEPkts float64 + InReceives *float64 + InHdrErrors *float64 + InTooBigErrors *float64 + InNoRoutes *float64 + InAddrErrors *float64 + InUnknownProtos *float64 + InTruncatedPkts *float64 + InDiscards *float64 + InDelivers *float64 + OutForwDatagrams *float64 + OutRequests *float64 + OutDiscards *float64 + OutNoRoutes *float64 + ReasmTimeout *float64 + ReasmReqds *float64 + ReasmOKs *float64 + ReasmFails *float64 + FragOKs *float64 + FragFails *float64 + FragCreates *float64 + InMcastPkts *float64 + OutMcastPkts *float64 + InOctets *float64 + OutOctets *float64 + InMcastOctets *float64 + OutMcastOctets *float64 + InBcastOctets *float64 + OutBcastOctets *float64 + InNoECTPkts *float64 + InECT1Pkts *float64 + InECT0Pkts *float64 + InCEPkts *float64 } type Icmp6 struct { - InMsgs float64 - InErrors float64 - OutMsgs float64 - OutErrors float64 - InCsumErrors float64 - InDestUnreachs float64 - InPktTooBigs float64 - InTimeExcds float64 - InParmProblems float64 - InEchos float64 - InEchoReplies float64 - InGroupMembQueries float64 - InGroupMembResponses float64 - InGroupMembReductions float64 - InRouterSolicits float64 - InRouterAdvertisements float64 - InNeighborSolicits float64 - InNeighborAdvertisements float64 - InRedirects float64 - InMLDv2Reports float64 - OutDestUnreachs float64 - OutPktTooBigs float64 - OutTimeExcds float64 - OutParmProblems float64 - OutEchos float64 - OutEchoReplies float64 - OutGroupMembQueries float64 - OutGroupMembResponses float64 - OutGroupMembReductions float64 - OutRouterSolicits float64 - OutRouterAdvertisements float64 - OutNeighborSolicits float64 - OutNeighborAdvertisements float64 - OutRedirects float64 - OutMLDv2Reports float64 - InType1 float64 - InType134 float64 - InType135 float64 - InType136 float64 - InType143 float64 - OutType133 float64 - OutType135 float64 - OutType136 float64 - OutType143 float64 + InMsgs *float64 + InErrors *float64 + OutMsgs *float64 + OutErrors *float64 + InCsumErrors *float64 + InDestUnreachs *float64 + InPktTooBigs *float64 + InTimeExcds *float64 + InParmProblems *float64 + InEchos *float64 + InEchoReplies *float64 + InGroupMembQueries *float64 + InGroupMembResponses *float64 + InGroupMembReductions *float64 + InRouterSolicits *float64 + InRouterAdvertisements *float64 + InNeighborSolicits *float64 + InNeighborAdvertisements *float64 + InRedirects *float64 + InMLDv2Reports *float64 + OutDestUnreachs *float64 + OutPktTooBigs *float64 + OutTimeExcds *float64 + OutParmProblems *float64 + OutEchos *float64 + OutEchoReplies *float64 + OutGroupMembQueries *float64 + OutGroupMembResponses *float64 + OutGroupMembReductions *float64 + OutRouterSolicits *float64 + OutRouterAdvertisements *float64 + OutNeighborSolicits *float64 + OutNeighborAdvertisements *float64 + OutRedirects *float64 + 
OutMLDv2Reports *float64 + InType1 *float64 + InType134 *float64 + InType135 *float64 + InType136 *float64 + InType143 *float64 + OutType133 *float64 + OutType135 *float64 + OutType136 *float64 + OutType143 *float64 } type Udp6 struct { // nolint:revive - InDatagrams float64 - NoPorts float64 - InErrors float64 - OutDatagrams float64 - RcvbufErrors float64 - SndbufErrors float64 - InCsumErrors float64 - IgnoredMulti float64 + InDatagrams *float64 + NoPorts *float64 + InErrors *float64 + OutDatagrams *float64 + RcvbufErrors *float64 + SndbufErrors *float64 + InCsumErrors *float64 + IgnoredMulti *float64 } type UdpLite6 struct { // nolint:revive - InDatagrams float64 - NoPorts float64 - InErrors float64 - OutDatagrams float64 - RcvbufErrors float64 - SndbufErrors float64 - InCsumErrors float64 + InDatagrams *float64 + NoPorts *float64 + InErrors *float64 + OutDatagrams *float64 + RcvbufErrors *float64 + SndbufErrors *float64 + InCsumErrors *float64 } func (p Proc) Snmp6() (ProcSnmp6, error) { @@ -182,197 +182,197 @@ func parseSNMP6Stats(r io.Reader) (ProcSnmp6, error) { case "Ip6": switch key { case "InReceives": - procSnmp6.Ip6.InReceives = value + procSnmp6.Ip6.InReceives = &value case "InHdrErrors": - procSnmp6.Ip6.InHdrErrors = value + procSnmp6.Ip6.InHdrErrors = &value case "InTooBigErrors": - procSnmp6.Ip6.InTooBigErrors = value + procSnmp6.Ip6.InTooBigErrors = &value case "InNoRoutes": - procSnmp6.Ip6.InNoRoutes = value + procSnmp6.Ip6.InNoRoutes = &value case "InAddrErrors": - procSnmp6.Ip6.InAddrErrors = value + procSnmp6.Ip6.InAddrErrors = &value case "InUnknownProtos": - procSnmp6.Ip6.InUnknownProtos = value + procSnmp6.Ip6.InUnknownProtos = &value case "InTruncatedPkts": - procSnmp6.Ip6.InTruncatedPkts = value + procSnmp6.Ip6.InTruncatedPkts = &value case "InDiscards": - procSnmp6.Ip6.InDiscards = value + procSnmp6.Ip6.InDiscards = &value case "InDelivers": - procSnmp6.Ip6.InDelivers = value + procSnmp6.Ip6.InDelivers = &value case "OutForwDatagrams": - procSnmp6.Ip6.OutForwDatagrams = value + procSnmp6.Ip6.OutForwDatagrams = &value case "OutRequests": - procSnmp6.Ip6.OutRequests = value + procSnmp6.Ip6.OutRequests = &value case "OutDiscards": - procSnmp6.Ip6.OutDiscards = value + procSnmp6.Ip6.OutDiscards = &value case "OutNoRoutes": - procSnmp6.Ip6.OutNoRoutes = value + procSnmp6.Ip6.OutNoRoutes = &value case "ReasmTimeout": - procSnmp6.Ip6.ReasmTimeout = value + procSnmp6.Ip6.ReasmTimeout = &value case "ReasmReqds": - procSnmp6.Ip6.ReasmReqds = value + procSnmp6.Ip6.ReasmReqds = &value case "ReasmOKs": - procSnmp6.Ip6.ReasmOKs = value + procSnmp6.Ip6.ReasmOKs = &value case "ReasmFails": - procSnmp6.Ip6.ReasmFails = value + procSnmp6.Ip6.ReasmFails = &value case "FragOKs": - procSnmp6.Ip6.FragOKs = value + procSnmp6.Ip6.FragOKs = &value case "FragFails": - procSnmp6.Ip6.FragFails = value + procSnmp6.Ip6.FragFails = &value case "FragCreates": - procSnmp6.Ip6.FragCreates = value + procSnmp6.Ip6.FragCreates = &value case "InMcastPkts": - procSnmp6.Ip6.InMcastPkts = value + procSnmp6.Ip6.InMcastPkts = &value case "OutMcastPkts": - procSnmp6.Ip6.OutMcastPkts = value + procSnmp6.Ip6.OutMcastPkts = &value case "InOctets": - procSnmp6.Ip6.InOctets = value + procSnmp6.Ip6.InOctets = &value case "OutOctets": - procSnmp6.Ip6.OutOctets = value + procSnmp6.Ip6.OutOctets = &value case "InMcastOctets": - procSnmp6.Ip6.InMcastOctets = value + procSnmp6.Ip6.InMcastOctets = &value case "OutMcastOctets": - procSnmp6.Ip6.OutMcastOctets = value + procSnmp6.Ip6.OutMcastOctets = &value case 
"InBcastOctets": - procSnmp6.Ip6.InBcastOctets = value + procSnmp6.Ip6.InBcastOctets = &value case "OutBcastOctets": - procSnmp6.Ip6.OutBcastOctets = value + procSnmp6.Ip6.OutBcastOctets = &value case "InNoECTPkts": - procSnmp6.Ip6.InNoECTPkts = value + procSnmp6.Ip6.InNoECTPkts = &value case "InECT1Pkts": - procSnmp6.Ip6.InECT1Pkts = value + procSnmp6.Ip6.InECT1Pkts = &value case "InECT0Pkts": - procSnmp6.Ip6.InECT0Pkts = value + procSnmp6.Ip6.InECT0Pkts = &value case "InCEPkts": - procSnmp6.Ip6.InCEPkts = value + procSnmp6.Ip6.InCEPkts = &value } case "Icmp6": switch key { case "InMsgs": - procSnmp6.Icmp6.InMsgs = value + procSnmp6.Icmp6.InMsgs = &value case "InErrors": - procSnmp6.Icmp6.InErrors = value + procSnmp6.Icmp6.InErrors = &value case "OutMsgs": - procSnmp6.Icmp6.OutMsgs = value + procSnmp6.Icmp6.OutMsgs = &value case "OutErrors": - procSnmp6.Icmp6.OutErrors = value + procSnmp6.Icmp6.OutErrors = &value case "InCsumErrors": - procSnmp6.Icmp6.InCsumErrors = value + procSnmp6.Icmp6.InCsumErrors = &value case "InDestUnreachs": - procSnmp6.Icmp6.InDestUnreachs = value + procSnmp6.Icmp6.InDestUnreachs = &value case "InPktTooBigs": - procSnmp6.Icmp6.InPktTooBigs = value + procSnmp6.Icmp6.InPktTooBigs = &value case "InTimeExcds": - procSnmp6.Icmp6.InTimeExcds = value + procSnmp6.Icmp6.InTimeExcds = &value case "InParmProblems": - procSnmp6.Icmp6.InParmProblems = value + procSnmp6.Icmp6.InParmProblems = &value case "InEchos": - procSnmp6.Icmp6.InEchos = value + procSnmp6.Icmp6.InEchos = &value case "InEchoReplies": - procSnmp6.Icmp6.InEchoReplies = value + procSnmp6.Icmp6.InEchoReplies = &value case "InGroupMembQueries": - procSnmp6.Icmp6.InGroupMembQueries = value + procSnmp6.Icmp6.InGroupMembQueries = &value case "InGroupMembResponses": - procSnmp6.Icmp6.InGroupMembResponses = value + procSnmp6.Icmp6.InGroupMembResponses = &value case "InGroupMembReductions": - procSnmp6.Icmp6.InGroupMembReductions = value + procSnmp6.Icmp6.InGroupMembReductions = &value case "InRouterSolicits": - procSnmp6.Icmp6.InRouterSolicits = value + procSnmp6.Icmp6.InRouterSolicits = &value case "InRouterAdvertisements": - procSnmp6.Icmp6.InRouterAdvertisements = value + procSnmp6.Icmp6.InRouterAdvertisements = &value case "InNeighborSolicits": - procSnmp6.Icmp6.InNeighborSolicits = value + procSnmp6.Icmp6.InNeighborSolicits = &value case "InNeighborAdvertisements": - procSnmp6.Icmp6.InNeighborAdvertisements = value + procSnmp6.Icmp6.InNeighborAdvertisements = &value case "InRedirects": - procSnmp6.Icmp6.InRedirects = value + procSnmp6.Icmp6.InRedirects = &value case "InMLDv2Reports": - procSnmp6.Icmp6.InMLDv2Reports = value + procSnmp6.Icmp6.InMLDv2Reports = &value case "OutDestUnreachs": - procSnmp6.Icmp6.OutDestUnreachs = value + procSnmp6.Icmp6.OutDestUnreachs = &value case "OutPktTooBigs": - procSnmp6.Icmp6.OutPktTooBigs = value + procSnmp6.Icmp6.OutPktTooBigs = &value case "OutTimeExcds": - procSnmp6.Icmp6.OutTimeExcds = value + procSnmp6.Icmp6.OutTimeExcds = &value case "OutParmProblems": - procSnmp6.Icmp6.OutParmProblems = value + procSnmp6.Icmp6.OutParmProblems = &value case "OutEchos": - procSnmp6.Icmp6.OutEchos = value + procSnmp6.Icmp6.OutEchos = &value case "OutEchoReplies": - procSnmp6.Icmp6.OutEchoReplies = value + procSnmp6.Icmp6.OutEchoReplies = &value case "OutGroupMembQueries": - procSnmp6.Icmp6.OutGroupMembQueries = value + procSnmp6.Icmp6.OutGroupMembQueries = &value case "OutGroupMembResponses": - procSnmp6.Icmp6.OutGroupMembResponses = value + procSnmp6.Icmp6.OutGroupMembResponses = 
&value case "OutGroupMembReductions": - procSnmp6.Icmp6.OutGroupMembReductions = value + procSnmp6.Icmp6.OutGroupMembReductions = &value case "OutRouterSolicits": - procSnmp6.Icmp6.OutRouterSolicits = value + procSnmp6.Icmp6.OutRouterSolicits = &value case "OutRouterAdvertisements": - procSnmp6.Icmp6.OutRouterAdvertisements = value + procSnmp6.Icmp6.OutRouterAdvertisements = &value case "OutNeighborSolicits": - procSnmp6.Icmp6.OutNeighborSolicits = value + procSnmp6.Icmp6.OutNeighborSolicits = &value case "OutNeighborAdvertisements": - procSnmp6.Icmp6.OutNeighborAdvertisements = value + procSnmp6.Icmp6.OutNeighborAdvertisements = &value case "OutRedirects": - procSnmp6.Icmp6.OutRedirects = value + procSnmp6.Icmp6.OutRedirects = &value case "OutMLDv2Reports": - procSnmp6.Icmp6.OutMLDv2Reports = value + procSnmp6.Icmp6.OutMLDv2Reports = &value case "InType1": - procSnmp6.Icmp6.InType1 = value + procSnmp6.Icmp6.InType1 = &value case "InType134": - procSnmp6.Icmp6.InType134 = value + procSnmp6.Icmp6.InType134 = &value case "InType135": - procSnmp6.Icmp6.InType135 = value + procSnmp6.Icmp6.InType135 = &value case "InType136": - procSnmp6.Icmp6.InType136 = value + procSnmp6.Icmp6.InType136 = &value case "InType143": - procSnmp6.Icmp6.InType143 = value + procSnmp6.Icmp6.InType143 = &value case "OutType133": - procSnmp6.Icmp6.OutType133 = value + procSnmp6.Icmp6.OutType133 = &value case "OutType135": - procSnmp6.Icmp6.OutType135 = value + procSnmp6.Icmp6.OutType135 = &value case "OutType136": - procSnmp6.Icmp6.OutType136 = value + procSnmp6.Icmp6.OutType136 = &value case "OutType143": - procSnmp6.Icmp6.OutType143 = value + procSnmp6.Icmp6.OutType143 = &value } case "Udp6": switch key { case "InDatagrams": - procSnmp6.Udp6.InDatagrams = value + procSnmp6.Udp6.InDatagrams = &value case "NoPorts": - procSnmp6.Udp6.NoPorts = value + procSnmp6.Udp6.NoPorts = &value case "InErrors": - procSnmp6.Udp6.InErrors = value + procSnmp6.Udp6.InErrors = &value case "OutDatagrams": - procSnmp6.Udp6.OutDatagrams = value + procSnmp6.Udp6.OutDatagrams = &value case "RcvbufErrors": - procSnmp6.Udp6.RcvbufErrors = value + procSnmp6.Udp6.RcvbufErrors = &value case "SndbufErrors": - procSnmp6.Udp6.SndbufErrors = value + procSnmp6.Udp6.SndbufErrors = &value case "InCsumErrors": - procSnmp6.Udp6.InCsumErrors = value + procSnmp6.Udp6.InCsumErrors = &value case "IgnoredMulti": - procSnmp6.Udp6.IgnoredMulti = value + procSnmp6.Udp6.IgnoredMulti = &value } case "UdpLite6": switch key { case "InDatagrams": - procSnmp6.UdpLite6.InDatagrams = value + procSnmp6.UdpLite6.InDatagrams = &value case "NoPorts": - procSnmp6.UdpLite6.NoPorts = value + procSnmp6.UdpLite6.NoPorts = &value case "InErrors": - procSnmp6.UdpLite6.InErrors = value + procSnmp6.UdpLite6.InErrors = &value case "OutDatagrams": - procSnmp6.UdpLite6.OutDatagrams = value + procSnmp6.UdpLite6.OutDatagrams = &value case "RcvbufErrors": - procSnmp6.UdpLite6.RcvbufErrors = value + procSnmp6.UdpLite6.RcvbufErrors = &value case "SndbufErrors": - procSnmp6.UdpLite6.SndbufErrors = value + procSnmp6.UdpLite6.SndbufErrors = &value case "InCsumErrors": - procSnmp6.UdpLite6.InCsumErrors = value + procSnmp6.UdpLite6.InCsumErrors = &value } } } diff --git a/vendor/github.com/prometheus/procfs/proc_stat.go b/vendor/github.com/prometheus/procfs/proc_stat.go index 06c556ef..14b249f4 100644 --- a/vendor/github.com/prometheus/procfs/proc_stat.go +++ b/vendor/github.com/prometheus/procfs/proc_stat.go @@ -18,7 +18,6 @@ import ( "fmt" "os" - "github.com/prometheus/procfs/internal/fs" 
"github.com/prometheus/procfs/internal/util" ) @@ -102,6 +101,8 @@ type ProcStat struct { RSS int // Soft limit in bytes on the rss of the process. RSSLimit uint64 + // CPU number last executed on. + Processor uint // Real-time scheduling priority, a number in the range 1 to 99 for processes // scheduled under a real-time policy, or 0, for non-real-time processes. RTPriority uint @@ -110,7 +111,7 @@ type ProcStat struct { // Aggregated block I/O delays, measured in clock ticks (centiseconds). DelayAcctBlkIOTicks uint64 - proc fs.FS + proc FS } // NewStat returns the current status information of the process. @@ -184,7 +185,7 @@ func (p Proc) Stat() (ProcStat, error) { &ignoreUint64, &ignoreUint64, &ignoreInt64, - &ignoreInt64, + &s.Processor, &s.RTPriority, &s.Policy, &s.DelayAcctBlkIOTicks, @@ -208,8 +209,7 @@ func (s ProcStat) ResidentMemory() int { // StartTime returns the unix timestamp of the process in seconds. func (s ProcStat) StartTime() (float64, error) { - fs := FS{proc: s.proc} - stat, err := fs.Stat() + stat, err := s.proc.Stat() if err != nil { return 0, err } diff --git a/vendor/github.com/prometheus/procfs/proc_status.go b/vendor/github.com/prometheus/procfs/proc_status.go index 594022de..c055d075 100644 --- a/vendor/github.com/prometheus/procfs/proc_status.go +++ b/vendor/github.com/prometheus/procfs/proc_status.go @@ -15,6 +15,7 @@ package procfs import ( "bytes" + "sort" "strconv" "strings" @@ -76,6 +77,9 @@ type ProcStatus struct { UIDs [4]string // GIDs of the process (Real, effective, saved set, and filesystem GIDs) GIDs [4]string + + // CpusAllowedList: List of cpu cores processes are allowed to run on. + CpusAllowedList []uint64 } // NewStatus returns the current status information of the process. @@ -96,10 +100,10 @@ func (p Proc) NewStatus() (ProcStatus, error) { kv := strings.SplitN(line, ":", 2) // removes spaces - k := string(strings.TrimSpace(kv[0])) - v := string(strings.TrimSpace(kv[1])) + k := strings.TrimSpace(kv[0]) + v := strings.TrimSpace(kv[1]) // removes "kB" - v = string(bytes.Trim([]byte(v), " kB")) + v = strings.TrimSuffix(v, " kB") // value to int when possible // we can skip error check here, 'cause vKBytes is not used when value is a string @@ -161,10 +165,38 @@ func (s *ProcStatus) fillStatus(k string, vString string, vUint uint64, vUintByt s.VoluntaryCtxtSwitches = vUint case "nonvoluntary_ctxt_switches": s.NonVoluntaryCtxtSwitches = vUint + case "Cpus_allowed_list": + s.CpusAllowedList = calcCpusAllowedList(vString) } + } // TotalCtxtSwitches returns the total context switch. 
func (s ProcStatus) TotalCtxtSwitches() uint64 { return s.VoluntaryCtxtSwitches + s.NonVoluntaryCtxtSwitches } + +func calcCpusAllowedList(cpuString string) []uint64 { + s := strings.Split(cpuString, ",") + + var g []uint64 + + for _, cpu := range s { + // parse cpu ranges, example: 1-3=[1,2,3] + if l := strings.Split(strings.TrimSpace(cpu), "-"); len(l) > 1 { + startCPU, _ := strconv.ParseUint(l[0], 10, 64) + endCPU, _ := strconv.ParseUint(l[1], 10, 64) + + for i := startCPU; i <= endCPU; i++ { + g = append(g, i) + } + } else if len(l) == 1 { + cpu, _ := strconv.ParseUint(l[0], 10, 64) + g = append(g, cpu) + } + + } + + sort.Slice(g, func(i, j int) bool { return g[i] < g[j] }) + return g +} diff --git a/vendor/github.com/prometheus/procfs/stat.go b/vendor/github.com/prometheus/procfs/stat.go index 33f97caa..586af48a 100644 --- a/vendor/github.com/prometheus/procfs/stat.go +++ b/vendor/github.com/prometheus/procfs/stat.go @@ -62,7 +62,7 @@ type Stat struct { // Summed up cpu statistics. CPUTotal CPUStat // Per-CPU statistics. - CPU []CPUStat + CPU map[int64]CPUStat // Number of times interrupts were handled, which contains numbered and unnumbered IRQs. IRQTotal uint64 // Number of times a numbered IRQ was triggered. @@ -170,10 +170,23 @@ func (fs FS) Stat() (Stat, error) { if err != nil { return Stat{}, err } + procStat, err := parseStat(bytes.NewReader(data), fileName) + if err != nil { + return Stat{}, err + } + return procStat, nil +} - stat := Stat{} +// parseStat parses the metrics from /proc/[pid]/stat. +func parseStat(r io.Reader, fileName string) (Stat, error) { + var ( + scanner = bufio.NewScanner(r) + stat = Stat{ + CPU: make(map[int64]CPUStat), + } + err error + ) - scanner := bufio.NewScanner(bytes.NewReader(data)) for scanner.Scan() { line := scanner.Text() parts := strings.Fields(scanner.Text()) @@ -228,9 +241,6 @@ func (fs FS) Stat() (Stat, error) { if cpuID == -1 { stat.CPUTotal = cpuStat } else { - for int64(len(stat.CPU)) <= cpuID { - stat.CPU = append(stat.CPU, CPUStat{}) - } stat.CPU[cpuID] = cpuStat } } diff --git a/vendor/github.com/prometheus/procfs/thread.go b/vendor/github.com/prometheus/procfs/thread.go new file mode 100644 index 00000000..490c1470 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/thread.go @@ -0,0 +1,80 @@ +// Copyright 2022 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package procfs + +import ( + "fmt" + "os" + "strconv" + + fsi "github.com/prometheus/procfs/internal/fs" +) + +// Provide access to /proc/PID/task/TID files, for thread specific values. Since +// such files have the same structure as /proc/PID/ ones, the data structures +// and the parsers for the latter may be reused. + +// AllThreads returns a list of all currently available threads under /proc/PID. +func AllThreads(pid int) (Procs, error) { + fs, err := NewFS(DefaultMountPoint) + if err != nil { + return Procs{}, err + } + return fs.AllThreads(pid) +} + +// AllThreads returns a list of all currently available threads for PID. 
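Editorial note, not part of the vendored patch: the procfs hunks above switch every netstat/snmp counter to *float64, add ProcStat.Processor and ProcStatus.CpusAllowedList, and introduce per-thread helpers (the FS.AllThreads body continues just below). The following is a minimal consumer sketch of those changes; it assumes procfs.Self() from the library (not shown in this hunk) and abbreviates error handling.

// Hedged sketch of a caller adapting to the procfs API changes in this bump.
package main

import (
	"fmt"
	"os"

	"github.com/prometheus/procfs"
)

func main() {
	self, err := procfs.Self()
	if err != nil {
		panic(err)
	}

	// ProcStat now reports the CPU the process last executed on.
	stat, _ := self.Stat()
	fmt.Println("last ran on CPU:", stat.Processor)

	// ProcStatus now exposes the parsed Cpus_allowed_list as a []uint64.
	status, _ := self.NewStatus()
	fmt.Println("allowed CPUs:", status.CpusAllowedList)

	// Netstat counters are now *float64: nil means the kernel did not report
	// the field, so callers must nil-check before dereferencing.
	netstat, _ := self.Netstat()
	if v := netstat.TcpExt.TCPSynRetrans; v != nil {
		fmt.Println("TCPSynRetrans:", *v)
	}

	// The new AllThreads helper lists per-thread Proc entries under /proc/PID/task.
	threads, _ := procfs.AllThreads(os.Getpid())
	for _, t := range threads {
		fmt.Println("thread:", t.PID)
	}
}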
+func (fs FS) AllThreads(pid int) (Procs, error) { + taskPath := fs.proc.Path(strconv.Itoa(pid), "task") + d, err := os.Open(taskPath) + if err != nil { + return Procs{}, err + } + defer d.Close() + + names, err := d.Readdirnames(-1) + if err != nil { + return Procs{}, fmt.Errorf("could not read %q: %w", d.Name(), err) + } + + t := Procs{} + for _, n := range names { + tid, err := strconv.ParseInt(n, 10, 64) + if err != nil { + continue + } + + t = append(t, Proc{PID: int(tid), fs: FS{fsi.FS(taskPath), fs.real}}) + } + + return t, nil +} + +// Thread returns a process for a given PID, TID. +func (fs FS) Thread(pid, tid int) (Proc, error) { + taskPath := fs.proc.Path(strconv.Itoa(pid), "task") + if _, err := os.Stat(taskPath); err != nil { + return Proc{}, err + } + return Proc{PID: tid, fs: FS{fsi.FS(taskPath), fs.real}}, nil +} + +// Thread returns a process for a given TID of Proc. +func (proc Proc) Thread(tid int) (Proc, error) { + tfs := FS{fsi.FS(proc.path("task")), proc.fs.real} + if _, err := os.Stat(tfs.proc.Path(strconv.Itoa(tid))); err != nil { + return Proc{}, err + } + return Proc{PID: tid, fs: tfs}, nil +} diff --git a/vendor/github.com/prometheus/procfs/vm.go b/vendor/github.com/prometheus/procfs/vm.go index 20ceb77e..cdedcae9 100644 --- a/vendor/github.com/prometheus/procfs/vm.go +++ b/vendor/github.com/prometheus/procfs/vm.go @@ -26,7 +26,9 @@ import ( ) // The VM interface is described at -// https://www.kernel.org/doc/Documentation/sysctl/vm.txt +// +// https://www.kernel.org/doc/Documentation/sysctl/vm.txt +// // Each setting is exposed as a single file. // Each file contains one line with a single numerical value, except lowmem_reserve_ratio which holds an array // and numa_zonelist_order (deprecated) which is a string. diff --git a/vendor/k8s.io/kube-openapi/pkg/cached/cache.go b/vendor/k8s.io/kube-openapi/pkg/cached/cache.go index 16e34853..76415b78 100644 --- a/vendor/k8s.io/kube-openapi/pkg/cached/cache.go +++ b/vendor/k8s.io/kube-openapi/pkg/cached/cache.go @@ -19,6 +19,8 @@ limitations under the License. // operations are not repeated unnecessarily. The operations can be // created as a tree, and replaced dynamically as needed. // +// All the operations in this module are thread-safe. +// // # Dependencies and types of caches // // This package uses a source/transform/sink model of caches to build @@ -34,12 +36,6 @@ limitations under the License. // replaced with a new one, and saves the previous results in case an // error pops-up. // -// # Atomicity -// -// Most of the operations are not atomic/thread-safe, except for -// [Replaceable.Replace] which can be performed while the objects -// are being read. -// // # Etags // // Etags in this library is a cache version identifier. It doesn't @@ -54,6 +50,7 @@ package cached import ( "fmt" + "sync" "sync/atomic" ) @@ -100,14 +97,6 @@ type Data[T any] interface { Get() Result[T] } -// T is the source type, V is the destination type. -type merger[K comparable, T, V any] struct { - mergeFn func(map[K]Result[T]) Result[V] - caches map[K]Data[T] - cacheResults map[K]Result[T] - result Result[V] -} - // NewMerger creates a new merge cache, a cache that merges the result // of other caches. The function only gets called if any of the // dependency has changed. @@ -125,27 +114,89 @@ type merger[K comparable, T, V any] struct { // function will remerge all the dependencies together everytime. Since // the list of dependencies is constant, there is no way to save some // partial merge information either. 
+// +// Also note that Golang map iteration is not stable. If the mergeFn +// depends on the order iteration to be stable, it will need to +// implement its own sorting or iteration order. func NewMerger[K comparable, T, V any](mergeFn func(results map[K]Result[T]) Result[V], caches map[K]Data[T]) Data[V] { - return &merger[K, T, V]{ + listCaches := make([]Data[T], 0, len(caches)) + // maps from index to key + indexes := make(map[int]K, len(caches)) + i := 0 + for k := range caches { + listCaches = append(listCaches, caches[k]) + indexes[i] = k + i++ + } + + return NewListMerger(func(results []Result[T]) Result[V] { + if len(results) != len(indexes) { + panic(fmt.Errorf("invalid result length %d, expected %d", len(results), len(indexes))) + } + m := make(map[K]Result[T], len(results)) + for i := range results { + m[indexes[i]] = results[i] + } + return mergeFn(m) + }, listCaches) +} + +type listMerger[T, V any] struct { + lock sync.Mutex + mergeFn func([]Result[T]) Result[V] + caches []Data[T] + cacheResults []Result[T] + result Result[V] +} + +// NewListMerger creates a new merge cache that merges the results of +// other caches in list form. The function only gets called if any of +// the dependency has changed. +// +// The benefit of ListMerger over the basic Merger is that caches are +// stored in an ordered list so the order of the cache will be +// preserved in the order of the results passed to the mergeFn. +// +// If any of the dependency returned an error before, or any of the +// dependency returned an error this time, or if the mergeFn failed +// before, then the function is reran. +// +// Note that this assumes there is no "partial" merge, the merge +// function will remerge all the dependencies together everytime. Since +// the list of dependencies is constant, there is no way to save some +// partial merge information either. 
+func NewListMerger[T, V any](mergeFn func(results []Result[T]) Result[V], caches []Data[T]) Data[V] { + return &listMerger[T, V]{ mergeFn: mergeFn, caches: caches, } } -func (c *merger[K, T, V]) prepareResults() map[K]Result[T] { - cacheResults := make(map[K]Result[T], len(c.caches)) - for key, cache := range c.caches { - cacheResults[key] = cache.Get() +func (c *listMerger[T, V]) prepareResultsLocked() []Result[T] { + cacheResults := make([]Result[T], len(c.caches)) + ch := make(chan struct { + int + Result[T] + }, len(c.caches)) + for i := range c.caches { + go func(index int) { + ch <- struct { + int + Result[T] + }{ + index, + c.caches[index].Get(), + } + }(i) + } + for i := 0; i < len(c.caches); i++ { + res := <-ch + cacheResults[res.int] = res.Result } return cacheResults } -// Rerun if: -// - The last run resulted in an error -// - Any of the dependency previously returned an error -// - Any of the dependency just returned an error -// - Any of the dependency's etag changed -func (c *merger[K, T, V]) needsRunning(results map[K]Result[T]) bool { +func (c *listMerger[T, V]) needsRunningLocked(results []Result[T]) bool { if c.cacheResults == nil { return true } @@ -155,12 +206,8 @@ func (c *merger[K, T, V]) needsRunning(results map[K]Result[T]) bool { if len(results) != len(c.cacheResults) { panic(fmt.Errorf("invalid number of results: %v (expected %v)", len(results), len(c.cacheResults))) } - for key, oldResult := range c.cacheResults { - newResult, ok := results[key] - if !ok { - panic(fmt.Errorf("unknown cache entry: %v", key)) - } - + for i, oldResult := range c.cacheResults { + newResult := results[i] if newResult.Etag != oldResult.Etag || newResult.Err != nil || oldResult.Err != nil { return true } @@ -168,17 +215,17 @@ func (c *merger[K, T, V]) needsRunning(results map[K]Result[T]) bool { return false } -func (c *merger[K, T, V]) Get() Result[V] { - cacheResults := c.prepareResults() - if c.needsRunning(cacheResults) { +func (c *listMerger[T, V]) Get() Result[V] { + c.lock.Lock() + defer c.lock.Unlock() + cacheResults := c.prepareResultsLocked() + if c.needsRunningLocked(cacheResults) { c.cacheResults = cacheResults c.result = c.mergeFn(c.cacheResults) } return c.result } -type transformerCacheKeyType struct{} - // NewTransformer creates a new cache that transforms the result of // another cache. The transformFn will only be called if the source // cache has updated the output, otherwise, the cached result will be @@ -188,20 +235,17 @@ type transformerCacheKeyType struct{} // this time, or if the transformerFn failed before, the function is // reran. func NewTransformer[T, V any](transformerFn func(Result[T]) Result[V], source Data[T]) Data[V] { - return NewMerger(func(caches map[transformerCacheKeyType]Result[T]) Result[V] { - cache, ok := caches[transformerCacheKeyType{}] - if len(caches) != 1 || !ok { + return NewListMerger(func(caches []Result[T]) Result[V] { + if len(caches) != 1 { panic(fmt.Errorf("invalid cache for transformer cache: %v", caches)) } - return transformerFn(cache) - }, map[transformerCacheKeyType]Data[T]{ - {}: source, - }) + return transformerFn(caches[0]) + }, []Data[T]{source}) } // NewSource creates a new cache that generates some data. This // will always be called since we don't know the origin of the data and -// if it needs to be updated or not. +// if it needs to be updated or not. sourceFn MUST be thread-safe. 
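Editorial note, not part of the vendored patch: the cached hunks above add NewListMerger and document that sourceFn must be thread-safe (the NewSource implementation follows below). A hedged usage sketch of the two helpers; it assumes Result[T] carries exported Data, Etag and Err fields, as the Etag/Err accesses elsewhere in this file suggest.

// Hedged sketch: merging two sources with the list-ordered merger.
package main

import (
	"fmt"
	"strings"

	"k8s.io/kube-openapi/pkg/cached"
)

func main() {
	// Sources are re-evaluated on every Get, so sourceFn must be thread-safe.
	a := cached.NewSource(func() cached.Result[string] {
		return cached.Result[string]{Data: "alpha", Etag: "a1"}
	})
	b := cached.NewSource(func() cached.Result[string] {
		return cached.Result[string]{Data: "beta", Etag: "b1"}
	})

	// NewListMerger preserves the order of its dependencies; the merge only
	// reruns when an Etag changes or an error is observed.
	merged := cached.NewListMerger(func(results []cached.Result[string]) cached.Result[string] {
		parts := make([]string, 0, len(results))
		etags := make([]string, 0, len(results))
		for _, r := range results {
			if r.Err != nil {
				return cached.Result[string]{Err: r.Err}
			}
			parts = append(parts, r.Data)
			etags = append(etags, r.Etag)
		}
		return cached.Result[string]{
			Data: strings.Join(parts, "+"),
			Etag: strings.Join(etags, "/"),
		}
	}, []cached.Data[string]{a, b})

	fmt.Println(merged.Get().Data) // "alpha+beta"
}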
func NewSource[T any](sourceFn func() Result[T]) Data[T] { c := source[T](sourceFn) return &c @@ -222,25 +266,24 @@ func NewStaticSource[T any](staticFn func() Result[T]) Data[T] { } type static[T any] struct { + once sync.Once fn func() Result[T] - result *Result[T] + result Result[T] } func (c *static[T]) Get() Result[T] { - if c.result == nil { - result := c.fn() - c.result = &result - } - return *c.result + c.once.Do(func() { + c.result = c.fn() + }) + return c.result } -// Replaceable is a cache that carries the result even when the -// cache is replaced. The cache can be replaced atomically (without any -// lock held). This is the type that should typically be stored in +// Replaceable is a cache that carries the result even when the cache is +// replaced. This is the type that should typically be stored in // structs. type Replaceable[T any] struct { cache atomic.Pointer[Data[T]] - result *Result[T] + result atomic.Pointer[Result[T]] } // Get retrieves the data from the underlying source. [Replaceable] @@ -251,14 +294,19 @@ type Replaceable[T any] struct { // failure is returned. func (c *Replaceable[T]) Get() Result[T] { result := (*c.cache.Load()).Get() - if result.Err != nil && c.result != nil && c.result.Err == nil { - return *c.result + + for { + cResult := c.result.Load() + if result.Err != nil && cResult != nil && cResult.Err == nil { + return *cResult + } + if c.result.CompareAndSwap(cResult, &result) { + return result + } } - c.result = &result - return *c.result } -// Replace changes the cache in a thread-safe way. +// Replace changes the cache. func (c *Replaceable[T]) Replace(cache Data[T]) { c.cache.Swap(&cache) } diff --git a/vendor/k8s.io/kube-openapi/pkg/handler3/handler.go b/vendor/k8s.io/kube-openapi/pkg/handler3/handler.go index 66b7a68d..2263e2f3 100644 --- a/vendor/k8s.io/kube-openapi/pkg/handler3/handler.go +++ b/vendor/k8s.io/kube-openapi/pkg/handler3/handler.go @@ -30,7 +30,7 @@ import ( "time" "github.com/golang/protobuf/proto" - openapi_v3 "github.com/google/gnostic/openapiv3" + openapi_v3 "github.com/google/gnostic-models/openapiv3" "github.com/google/uuid" "github.com/munnerz/goautoneg" "k8s.io/klog/v2" diff --git a/vendor/k8s.io/kube-openapi/pkg/util/proto/document.go b/vendor/k8s.io/kube-openapi/pkg/util/proto/document.go index 763923df..5789e67a 100644 --- a/vendor/k8s.io/kube-openapi/pkg/util/proto/document.go +++ b/vendor/k8s.io/kube-openapi/pkg/util/proto/document.go @@ -21,7 +21,7 @@ import ( "sort" "strings" - openapi_v2 "github.com/google/gnostic/openapiv2" + openapi_v2 "github.com/google/gnostic-models/openapiv2" "gopkg.in/yaml.v2" ) diff --git a/vendor/k8s.io/kube-openapi/pkg/util/proto/document_v3.go b/vendor/k8s.io/kube-openapi/pkg/util/proto/document_v3.go index 519dcf2e..d9f2896e 100644 --- a/vendor/k8s.io/kube-openapi/pkg/util/proto/document_v3.go +++ b/vendor/k8s.io/kube-openapi/pkg/util/proto/document_v3.go @@ -21,7 +21,7 @@ import ( "reflect" "strings" - openapi_v3 "github.com/google/gnostic/openapiv3" + openapi_v3 "github.com/google/gnostic-models/openapiv3" "gopkg.in/yaml.v3" ) diff --git a/vendor/k8s.io/kube-openapi/pkg/validation/spec/gnostic.go b/vendor/k8s.io/kube-openapi/pkg/validation/spec/gnostic.go index 406a09d9..6a77f2ac 100644 --- a/vendor/k8s.io/kube-openapi/pkg/validation/spec/gnostic.go +++ b/vendor/k8s.io/kube-openapi/pkg/validation/spec/gnostic.go @@ -21,7 +21,7 @@ import ( "strconv" "github.com/go-openapi/jsonreference" - openapi_v2 "github.com/google/gnostic/openapiv2" + openapi_v2 
"github.com/google/gnostic-models/openapiv2" ) // Interfaces diff --git a/vendor/k8s.io/utils/nsenter/nsenter.go b/vendor/k8s.io/utils/nsenter/nsenter.go index 237b636b..6f847db8 100644 --- a/vendor/k8s.io/utils/nsenter/nsenter.go +++ b/vendor/k8s.io/utils/nsenter/nsenter.go @@ -49,26 +49,28 @@ type Nsenter = NSEnter // // NSEnter requires: // -// 1. Docker >= 1.6 due to the dependency on the slave propagation mode +// 1. Docker >= 1.6 due to the dependency on the slave propagation mode // of the bind-mount of the kubelet root directory in the container. // Docker 1.5 used a private propagation mode for bind-mounts, so mounts // performed in the host's mount namespace do not propagate out to the // bind-mount in this docker version. -// 2. The host's root filesystem must be available at /rootfs -// 3. The nsenter binary must be on the Kubelet process' PATH in the container's +// 2. The host's root filesystem must be available at /rootfs +// 3. The nsenter binary must be on the Kubelet process' PATH in the container's // filesystem. -// 4. The Kubelet process must have CAP_SYS_ADMIN (required by nsenter); at +// 4. The Kubelet process must have CAP_SYS_ADMIN (required by nsenter); at // the present, this effectively means that the kubelet is running in a // privileged container. -// 5. The volume path used by the Kubelet must be the same inside and outside +// 5. The volume path used by the Kubelet must be the same inside and outside // the container and be writable by the container (to initialize volume) // contents. TODO: remove this requirement. -// 6. The host image must have "mount", "findmnt", "umount", "stat", "touch", +// 6. The host image must have "mount", "findmnt", "umount", "stat", "touch", // "mkdir", "ls", "sh" and "chmod" binaries in /bin, /usr/sbin, or /usr/bin -// 7. The host image should have systemd-run in /bin, /usr/sbin, or /usr/bin if +// 7. The host image should have systemd-run in /bin, /usr/sbin, or /usr/bin if // systemd is installed/enabled in the operating system. +// // For more information about mount propagation modes, see: -// https://www.kernel.org/doc/Documentation/filesystems/sharedsubtree.txt +// +// https://www.kernel.org/doc/Documentation/filesystems/sharedsubtree.txt type NSEnter struct { // a map of commands to their paths on the host filesystem paths map[string]string @@ -174,10 +176,13 @@ func (ne *NSEnter) SupportsSystemd() (string, bool) { // exist. When it's false, it evaluates symlinks of the existing part and // blindly adds the non-existing part: // pathname: /mnt/volume/non/existing/directory -// /mnt/volume exists -// non/existing/directory does not exist +// +// /mnt/volume exists +// non/existing/directory does not exist +// // -> It resolves symlinks in /mnt/volume to say /mnt/foo and returns -// /mnt/foo/non/existing/directory. +// +// /mnt/foo/non/existing/directory. // // BEWARE! EvalSymlinks is not able to detect symlink looks with mustExist=false! // If /tmp/link is symlink to /tmp/link, EvalSymlinks(/tmp/link/foo) returns /tmp/link/foo. diff --git a/vendor/k8s.io/utils/trace/trace.go b/vendor/k8s.io/utils/trace/trace.go index a0b07a6d..187eb5d8 100644 --- a/vendor/k8s.io/utils/trace/trace.go +++ b/vendor/k8s.io/utils/trace/trace.go @@ -65,6 +65,11 @@ func durationToMilliseconds(timeDuration time.Duration) int64 { } type traceItem interface { + // rLock must be called before invoking time or writeItem. + rLock() + // rUnlock must be called after processing the item is complete. 
+ rUnlock() + // time returns when the trace was recorded as completed. time() time.Time // writeItem outputs the traceItem to the buffer. If stepThreshold is non-nil, only output the @@ -79,6 +84,10 @@ type traceStep struct { fields []Field } +// rLock doesn't need to do anything because traceStep instances are immutable. +func (s traceStep) rLock() {} +func (s traceStep) rUnlock() {} + func (s traceStep) time() time.Time { return s.stepTime } @@ -106,6 +115,14 @@ type Trace struct { traceItems []traceItem } +func (t *Trace) rLock() { + t.lock.RLock() +} + +func (t *Trace) rUnlock() { + t.lock.RUnlock() +} + func (t *Trace) time() time.Time { if t.endTime != nil { return *t.endTime @@ -231,8 +248,10 @@ func (t *Trace) logTrace() { func (t *Trace) writeTraceSteps(b *bytes.Buffer, formatter string, stepThreshold *time.Duration) { lastStepTime := t.startTime for _, stepOrTrace := range t.traceItems { + stepOrTrace.rLock() stepOrTrace.writeItem(b, formatter, lastStepTime, stepThreshold) lastStepTime = stepOrTrace.time() + stepOrTrace.rUnlock() } } diff --git a/vendor/modules.txt b/vendor/modules.txt index 0f705528..d88847cd 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -24,7 +24,7 @@ github.com/go-logr/logr/slogr # github.com/go-openapi/jsonpointer v0.19.6 ## explicit; go 1.13 github.com/go-openapi/jsonpointer -# github.com/go-openapi/jsonreference v0.20.1 +# github.com/go-openapi/jsonreference v0.20.2 ## explicit; go 1.13 github.com/go-openapi/jsonreference github.com/go-openapi/jsonreference/internal @@ -55,7 +55,13 @@ github.com/google/gnostic/compiler github.com/google/gnostic/extensions github.com/google/gnostic/jsonschema github.com/google/gnostic/openapiv2 -github.com/google/gnostic/openapiv3 +# github.com/google/gnostic-models v0.6.8 +## explicit; go 1.18 +github.com/google/gnostic-models/compiler +github.com/google/gnostic-models/extensions +github.com/google/gnostic-models/jsonschema +github.com/google/gnostic-models/openapiv2 +github.com/google/gnostic-models/openapiv3 # github.com/google/go-cmp v0.5.9 ## explicit; go 1.13 github.com/google/go-cmp/cmp @@ -79,7 +85,7 @@ github.com/json-iterator/go # github.com/kubernetes-csi/csi-lib-iscsi v0.0.0-20230429162816-0798dedca439 ## explicit; go 1.19 github.com/kubernetes-csi/csi-lib-iscsi/iscsi -# github.com/kubernetes-csi/csi-lib-utils v0.14.0 +# github.com/kubernetes-csi/csi-lib-utils v0.16.0 ## explicit; go 1.18 github.com/kubernetes-csi/csi-lib-utils/protosanitizer # github.com/mailru/easyjson v0.7.7 @@ -108,22 +114,22 @@ github.com/opencontainers/selinux/go-selinux github.com/opencontainers/selinux/go-selinux/label github.com/opencontainers/selinux/pkg/pwalk github.com/opencontainers/selinux/pkg/pwalkdir -# github.com/prometheus/client_golang v1.14.0 +# github.com/prometheus/client_golang v1.16.0 ## explicit; go 1.17 github.com/prometheus/client_golang/prometheus github.com/prometheus/client_golang/prometheus/collectors github.com/prometheus/client_golang/prometheus/internal github.com/prometheus/client_golang/prometheus/promhttp -# github.com/prometheus/client_model v0.3.0 -## explicit; go 1.9 +# github.com/prometheus/client_model v0.4.0 +## explicit; go 1.18 github.com/prometheus/client_model/go -# github.com/prometheus/common v0.38.0 -## explicit; go 1.17 +# github.com/prometheus/common v0.44.0 +## explicit; go 1.18 github.com/prometheus/common/expfmt github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg github.com/prometheus/common/model -# github.com/prometheus/procfs v0.8.0 -## explicit; go 1.17 +# 
github.com/prometheus/procfs v0.10.1 +## explicit; go 1.19 github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs github.com/prometheus/procfs/internal/util @@ -267,7 +273,7 @@ gopkg.in/yaml.v2 # gopkg.in/yaml.v3 v3.0.1 ## explicit gopkg.in/yaml.v3 -# k8s.io/api v0.27.0 => k8s.io/api v0.26.11 +# k8s.io/api v0.28.0 => k8s.io/api v0.26.11 ## explicit; go 1.19 k8s.io/api/admissionregistration/v1 k8s.io/api/admissionregistration/v1alpha1 @@ -320,7 +326,7 @@ k8s.io/api/scheduling/v1beta1 k8s.io/api/storage/v1 k8s.io/api/storage/v1alpha1 k8s.io/api/storage/v1beta1 -# k8s.io/apimachinery v0.27.0 => k8s.io/apimachinery v0.26.11 +# k8s.io/apimachinery v0.28.0 => k8s.io/apimachinery v0.26.11 ## explicit; go 1.19 k8s.io/apimachinery/pkg/api/errors k8s.io/apimachinery/pkg/api/meta @@ -370,7 +376,7 @@ k8s.io/apimachinery/third_party/forked/golang/reflect ## explicit; go 1.19 k8s.io/apiserver/pkg/features k8s.io/apiserver/pkg/util/feature -# k8s.io/client-go v0.27.0 => k8s.io/client-go v0.26.11 +# k8s.io/client-go v0.28.0 => k8s.io/client-go v0.26.11 ## explicit; go 1.19 k8s.io/client-go/applyconfigurations/admissionregistration/v1 k8s.io/client-go/applyconfigurations/admissionregistration/v1alpha1 @@ -609,7 +615,7 @@ k8s.io/client-go/util/workqueue # k8s.io/cloud-provider v0.26.11 => k8s.io/cloud-provider v0.26.11 ## explicit; go 1.19 k8s.io/cloud-provider -# k8s.io/component-base v0.26.11 => k8s.io/component-base v0.26.11 +# k8s.io/component-base v0.28.0 => k8s.io/component-base v0.26.11 ## explicit; go 1.19 k8s.io/component-base/featuregate k8s.io/component-base/metrics @@ -632,8 +638,8 @@ k8s.io/klog/v2/internal/dbg k8s.io/klog/v2/internal/serialize k8s.io/klog/v2/internal/severity k8s.io/klog/v2/internal/sloghandler -# k8s.io/kube-openapi v0.0.0-20230308215209-15aac26d736a -## explicit; go 1.18 +# k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9 +## explicit; go 1.19 k8s.io/kube-openapi/pkg/builder3/util k8s.io/kube-openapi/pkg/cached k8s.io/kube-openapi/pkg/common @@ -670,7 +676,7 @@ k8s.io/kubernetes/pkg/volume/util/volumepathhandler # k8s.io/mount-utils v0.26.11 => k8s.io/mount-utils v0.26.11 ## explicit; go 1.19 k8s.io/mount-utils -# k8s.io/utils v0.0.0-20230209194617-a36077c30491 +# k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 ## explicit; go 1.18 k8s.io/utils/buffer k8s.io/utils/clock
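Stepping back to the k8s.io/utils/trace hunk above: traceItem gains an rLock/rUnlock pair so writeTraceSteps can hold a nested Trace's read lock while it renders the item and reads its completion time, while immutable traceStep values get no-op implementations. A standalone sketch of the same read-lock-around-render pattern, built only on the standard library (item, step, nested, and the method names other than rLock/rUnlock are illustrative):

// Sketch of the per-item read-lock pattern added to k8s.io/utils/trace.
package main

import (
	"fmt"
	"strings"
	"sync"
	"time"
)

type item interface {
	rLock()
	rUnlock()
	when() time.Time
	write(b *strings.Builder)
}

// step is immutable once recorded, so its lock methods are no-ops.
type step struct {
	at  time.Time
	msg string
}

func (s step) rLock()          {}
func (s step) rUnlock()        {}
func (s step) when() time.Time { return s.at }
func (s step) write(b *strings.Builder) {
	fmt.Fprintf(b, "step %q at %s\n", s.msg, s.at.Format(time.RFC3339))
}

// nested may still be appended to concurrently, so reads take its read lock.
type nested struct {
	mu    sync.RWMutex
	items []item
}

func (n *nested) rLock()          { n.mu.RLock() }
func (n *nested) rUnlock()        { n.mu.RUnlock() }
func (n *nested) when() time.Time { return time.Now() }

// write brackets each child with its own lock, reading its time while held.
func (n *nested) write(b *strings.Builder) {
	last := time.Time{}
	for _, it := range n.items {
		it.rLock()
		it.write(b)
		last = it.when()
		it.rUnlock()
	}
	_ = last // in the real code this feeds the per-step threshold
}

func main() {
	n := &nested{items: []item{
		step{at: time.Now(), msg: "connect"},
		step{at: time.Now(), msg: "query"},
	}}
	var b strings.Builder
	n.rLock()
	n.write(&b)
	n.rUnlock()
	fmt.Print(b.String())
}

The apparent intent of the split is that immutable steps stay lock-free on the hot path, while a child trace that may still be receiving steps concurrently is only read under its RWMutex.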