From 1a72902b57b0d22cda3714cecd612a05a1775308 Mon Sep 17 00:00:00 2001 From: Patrick Marabeas Date: Fri, 31 Jan 2020 09:26:04 +1100 Subject: [PATCH 1/4] Initialize GraphQL client This will allow for resources and data sources to be uplifted on a case by case basis. * Add shurcool/graphqlv4 * Initialize a GraphQL client (v4client) * Rename client to v3client --- github/config.go | 40 +++++-- github/provider.go | 4 +- go.mod | 3 + go.sum | 182 ++++++++++++++++++++++++++++++- website/docs/index.html.markdown | 4 +- 5 files changed, 214 insertions(+), 19 deletions(-) diff --git a/github/config.go b/github/config.go index 7edb3eec33..862fde2467 100644 --- a/github/config.go +++ b/github/config.go @@ -6,9 +6,11 @@ import ( "fmt" "net/http" "net/url" + "path" "github.com/google/go-github/v29/github" "github.com/hashicorp/terraform-plugin-sdk/helper/logging" + "github.com/shurcooL/githubv4" "golang.org/x/oauth2" ) @@ -24,12 +26,13 @@ type Config struct { type Organization struct { name string id int64 - client *github.Client + v3client *github.Client + v4client *githubv4.Client StopContext context.Context } -// Client configures and returns a fully initialized GithubClient -func (c *Config) Client() (interface{}, error) { +// Clients configures and returns a fully initialized GithubClient and Githubv4Client +func (c *Config) Clients() (interface{}, error) { var org Organization var ts oauth2.TokenSource var tc *http.Client @@ -74,22 +77,37 @@ func (c *Config) Client() (interface{}, error) { tc.Transport = NewRateLimitTransport(tc.Transport) tc.Transport = logging.NewTransport("Github", tc.Transport) - org.client = github.NewClient(tc) + // Create GraphQL Client + uv4, err := url.Parse(c.BaseURL) + if err != nil { + return nil, err + } + uv4.Path = path.Join(uv4.Path, "graphql") + v4client := githubv4.NewEnterpriseClient(uv4.String(), tc) - if c.BaseURL != "" { - u, err := url.Parse(c.BaseURL) - if err != nil { - return nil, err - } - org.client.BaseURL = u + // Create Rest Client + uv3, err := url.Parse(c.BaseURL) + if err != nil { + return nil, err } + if uv3.String() != "https://api.github.com/" { + uv3.Path = uv3.Path + "v3/" + } + v3client, err := github.NewEnterpriseClient(uv3.String(), "", tc) + if err != nil { + return nil, err + } + v3client.BaseURL = uv3 + + org.v3client = v3client + org.v4client = v4client if c.Individual { org.name = "" } else { org.name = c.Organization - remoteOrg, _, err := org.client.Organizations.Get(ctx, org.name) + remoteOrg, _, err := org.v3client.Organizations.Get(ctx, org.name) if err != nil { return nil, err } diff --git a/github/provider.go b/github/provider.go index 6ba1ccb7c9..56aaf1f7f8 100644 --- a/github/provider.go +++ b/github/provider.go @@ -23,7 +23,7 @@ func Provider() terraform.ResourceProvider { "base_url": { Type: schema.TypeString, Optional: true, - DefaultFunc: schema.EnvDefaultFunc("GITHUB_BASE_URL", ""), + DefaultFunc: schema.EnvDefaultFunc("GITHUB_BASE_URL", "https://api.github.com/"), Description: descriptions["base_url"], }, "insecure": { @@ -122,7 +122,7 @@ func providerConfigure(p *schema.Provider) schema.ConfigureFunc { Anonymous: d.Get("anonymous").(bool), } - meta, err := config.Client() + meta, err := config.Clients() if err != nil { return nil, err } diff --git a/go.mod b/go.mod index 4f8d691ce0..da4ffa6473 100644 --- a/go.mod +++ b/go.mod @@ -4,8 +4,11 @@ go 1.13 require ( github.com/google/go-github/v29 v29.0.3 + github.com/hashicorp/terraform v0.12.24 github.com/hashicorp/terraform-plugin-sdk v1.7.0 github.com/kylelemons/godebug 
v1.1.0 + github.com/shurcooL/githubv4 v0.0.0-20191127044304-8f68eb5628d0 + github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f // indirect golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d ) diff --git a/go.sum b/go.sum index ad5036228d..5392b036dc 100644 --- a/go.sum +++ b/go.sum @@ -7,12 +7,41 @@ cloud.google.com/go v0.45.1 h1:lRi0CHyU+ytlvylOlFKKq0af6JncuyoRh1J+QJBqQx0= cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +github.com/Azure/azure-sdk-for-go v35.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v36.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= +github.com/Azure/go-autorest/autorest v0.9.2/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= +github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= +github.com/Azure/go-autorest/autorest/adal v0.8.1-0.20191028180845-3492b2aff503/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= +github.com/Azure/go-autorest/autorest/azure/cli v0.2.0/go.mod h1:WWTbGPvkAg3I4ms2j2s+Zr5xCGwGqTQh+6M2ZqOczkE= +github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= +github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g= +github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= +github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= +github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= +github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA= +github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRYq3HT1WyXAvT7LLY3tl70hw6tQIbjI= +github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= +github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= +github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/ChrisTrenkamp/goxpath v0.0.0-20170922090931-c385f95c6022/go.mod h1:nuWgzSkT5PnyOd+272uUmV0dnAnAn42Mk7PiQC5VzN4= +github.com/QcloudApi/qcloud_sign_golang v0.0.0-20141224014652-e4130a326409/go.mod h1:1pk82RBxDY/JZnPQrtqHlUFfCctgdorsd9M06fMynOM= +github.com/Unknwon/com v0.0.0-20151008135407-28b053d5a292/go.mod h1:KYCjqMOeHpNuTOiFQU6WEcTG7poCJrUs0YgyHNtn1no= +github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af/go.mod h1:5Jv4cbFiHJMsVxt52+i0Ha45fjshj6wxYr1r19tB9bw= github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/agl/ed25519 
v0.0.0-20150830182803-278e1ec8e8a6/go.mod h1:WPjqKcmVOxf0XSf3YxCJs6N6AOSrOx3obionmG7T0y0= github.com/agl/ed25519 v0.0.0-20170116200512-5312a6153412/go.mod h1:WPjqKcmVOxf0XSf3YxCJs6N6AOSrOx3obionmG7T0y0= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/aliyun/alibaba-cloud-sdk-go v0.0.0-20190329064014-6e358769c32a/go.mod h1:T9M45xf79ahXVelWoOBmH0y4aC1t5kXO5BxwyakgIGA= +github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20190103054945-8205d1f41e70/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= +github.com/aliyun/aliyun-tablestore-go-sdk v4.1.2+incompatible/go.mod h1:LDQHRZylxvcg8H7wBIDfvO5g/cy4/sz1iucBlc2l3Jw= +github.com/antchfx/xpath v0.0.0-20190129040759-c8489ed3251e/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk= +github.com/antchfx/xquery v0.0.0-20180515051857-ad5b8c7a47b0/go.mod h1:LzD22aAzDP8/dyiCKFp31He4m2GPjl0AFyzDtZzUu9M= github.com/apparentlymart/go-cidr v1.0.1 h1:NmIwLZ/KdsjIUlhf+/Np40atNXm/+lZ5txfTJ/SpF+U= github.com/apparentlymart/go-cidr v1.0.1/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= @@ -20,27 +49,56 @@ github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0 h1:MzVXffFU github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0= github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= +github.com/apparentlymart/go-versions v0.0.2-0.20180815153302-64b99f7cb171/go.mod h1:JXY95WvQrPJQtudvNARshgWajS7jNNlM90altXIPNyI= +github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM= github.com/aws/aws-sdk-go v1.25.3 h1:uM16hIw9BotjZKMZlX05SN2EFtaWfi/NonPKIARiBLQ= github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/bmatcuk/doublestar v1.1.5/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= +github.com/boltdb/bolt 
v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps= github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/coreos/bbolt v1.3.0/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= +github.com/dnaeon/go-vcr v0.0.0-20180920040454-5637cf3d8a31/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= +github.com/dylanmei/iso8601 v0.1.0/go.mod h1:w9KhXSgIyROl1DefbMYIE7UVSIvELTbMrCfx+QkYnoQ= +github.com/dylanmei/winrmtest v0.0.0-20190225150635-99b7fe2fddf1/go.mod h1:lcy9/2gH1jn/VCLouHA6tOEwLoNVd4GW6zhuKLmHC2Y= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20180513044358-24b0969c4cb7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1 h1:qGJ6qTW+x6xX/my+8YUVl4WNpX9B7+/l2tRsHGZ7f2s= @@ -50,6 +108,7 @@ github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y github.com/golang/protobuf v1.3.1/go.mod 
h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -70,22 +129,45 @@ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gophercloud/gophercloud v0.0.0-20190208042652-bc37892e1968/go.mod h1:3WdhXV3rUYy9p6AUW8d94kr+HS62Y4VL9mBnFxsD8q4= +github.com/gophercloud/utils v0.0.0-20190128072930-fbb6ab446f01/go.mod h1:wjDF8z83zTeg5eMLml5EBSlAhbF7G8DobyI1YsMuyzw= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/hashicorp/aws-sdk-go-base v0.4.0/go.mod h1:eRhlz3c4nhqxFZJAahJEFL7gh6Jyj5rQmQc7F9eHFyQ= +github.com/hashicorp/consul v0.0.0-20171026175957-610f3c86a089/go.mod h1:mFrjN1mfidgJfYP1xrJCF+AfRhr6Eaqhb2+sfyn/OOI= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-azure-helpers v0.10.0/go.mod h1:YuAtHxm2v74s+IjQwUG88dHBJPd5jL+cXr5BGVzSKhE= +github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-getter v1.4.0 h1:ENHNi8494porjD0ZhIrjlAHnveSFhY7hvOJrV/fsKkw= github.com/hashicorp/go-getter v1.4.0/go.mod h1:7qxyCd8rBfcShwsvxgIguu4KbS3l8bUCwg2Umn7RjeY= +github.com/hashicorp/go-getter v1.4.2-0.20200106182914-9813cbd4eb02 h1:l1KB3bHVdvegcIf5upQ5mjcHjs2qsWnKh4Yr9xgIuu8= +github.com/hashicorp/go-getter v1.4.2-0.20200106182914-9813cbd4eb02/go.mod h1:7qxyCd8rBfcShwsvxgIguu4KbS3l8bUCwg2Umn7RjeY= github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= +github.com/hashicorp/go-hclog v0.0.0-20181001195459-61d530d6c27f/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI= github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-immutable-radix 
v0.0.0-20180129170900-7f3cd4390caa/go.mod h1:6ij3Z20p+OhOkCSrA0gImAWoHYQRGbnlcuk6XYTiaRw= +github.com/hashicorp/go-msgpack v0.5.4/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-plugin v1.0.1-0.20190610192547-a1bc61569a26/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY= github.com/hashicorp/go-plugin v1.0.1 h1:4OtAfUGbnKC6yS48p0CtMX2oFYtzFZVv6rok3cRWgnE= github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY= +github.com/hashicorp/go-retryablehttp v0.5.2/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= +github.com/hashicorp/go-slug v0.4.1/go.mod h1:I5tq5Lv0E2xcNXNkmx7BSfzi1PsJ2cNjs3cC3LwyhK8= +github.com/hashicorp/go-sockaddr v0.0.0-20180320115054-6d291a969b86/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-tfe v0.3.27/go.mod h1:DVPSW2ogH+M9W1/i50ASgMht8cHP7NxxK0nrY9aFikQ= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= @@ -98,10 +180,19 @@ github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f h1:UdxlrJz4JOnY8W+Db github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w= github.com/hashicorp/hcl/v2 v2.0.0 h1:efQznTz+ydmQXq3BOnRa3AXzvCeTq1P4dKj/z5GLlY8= github.com/hashicorp/hcl/v2 v2.0.0/go.mod h1:oVVDG71tEinNGYCxinCYadcmKU9bglqW9pV3txagJ90= +github.com/hashicorp/hcl/v2 v2.3.0 h1:iRly8YaMwTBAKhn1Ybk7VSdzbnopghktCD031P8ggUE= +github.com/hashicorp/hcl/v2 v2.3.0/go.mod h1:d+FwDBbOLvpAM3Z6J7gPj/VoAGkNe/gm352ZhjJ/Zv8= +github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590/go.mod h1:n2TSygSNwsLJ76m8qFXTSc7beTb+auJxYdqrnoqwZWE= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/memberlist v0.1.0/go.mod h1:ncdBp14cuox2iFOq3kDiquKU6fqsTBc3W6JvZwjxxsE= +github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb/go.mod h1:h/Ru6tmZazX7WO/GDmwdpS975F019L4t5ng5IgwbNrE= +github.com/hashicorp/terraform v0.12.24 h1:lTTswsCcmTOhTwuUl2NdjtJBCNdGqZmRGQi0cjFHYOM= +github.com/hashicorp/terraform v0.12.24/go.mod h1:eJcloDEx5ywM4a1tetIuVrlqklM0bUVRYJBYAh4CYzA= github.com/hashicorp/terraform-config-inspect v0.0.0-20191115094559-17f92b0546e8 h1:+RyjwU+Gnd/aTJBPZVDNm903eXVjjqhbaR4Ypx3xYyY= github.com/hashicorp/terraform-config-inspect v0.0.0-20191115094559-17f92b0546e8/go.mod h1:p+ivJws3dpqbp1iP84+npOyAmTTOLMgCzrXd3GSdn/A= +github.com/hashicorp/terraform-config-inspect v0.0.0-20191212124732-c6ae6269b9d7 h1:Pc5TCv9mbxFN6UVX0LH6CpQrdTM5YjbVI2w15237Pjk= +github.com/hashicorp/terraform-config-inspect v0.0.0-20191212124732-c6ae6269b9d7/go.mod h1:p+ivJws3dpqbp1iP84+npOyAmTTOLMgCzrXd3GSdn/A= github.com/hashicorp/terraform-json v0.4.0 
h1:KNh29iNxozP5adfUFBJ4/fWd0Cu3taGgjHB38JYqOF4= github.com/hashicorp/terraform-json v0.4.0/go.mod h1:eAbqb4w0pSlRmdvl8fOyHAi/+8jnkVYN28gJkSJrLhU= github.com/hashicorp/terraform-plugin-sdk v1.7.0 h1:B//oq0ZORG+EkVrIJy0uPGSonvmXqxSzXe8+GhknoW0= @@ -110,15 +201,23 @@ github.com/hashicorp/terraform-plugin-test v1.2.0 h1:AWFdqyfnOj04sxTdaAF57QqvW7X github.com/hashicorp/terraform-plugin-test v1.2.0/go.mod h1:QIJHYz8j+xJtdtLrFTlzQVC0ocr3rf/OjIpgZLK56Hs= github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596 h1:hjyO2JsNZUKT1ym+FAdlBEkGPevazYsmVgIMw7dVELg= github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596/go.mod h1:kNDNcF7sN4DocDLBkQYz73HGKwN1ANB1blq4lIYLYvg= -github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb h1:b5rjCoWHc7eqmAS4/qyk21ZsHyb6Mxv/jykxvNTkU4M= +github.com/hashicorp/vault v0.10.4/go.mod h1:KfSyffbKxoVyspOdlaGVjIuwLobi07qD1bAbosPMpP0= github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ= github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/joyent/triton-go v0.0.0-20180313100802-d8f9c0314926/go.mod h1:U+RSyWxWd04xTqnuOQxnai7XGS2PrPY2cfGoDKtMHjA= +github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -127,6 +226,17 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/likexian/gokit v0.0.0-20190309162924-0a377eecf7aa/go.mod h1:QdfYv6y6qPA9pbBA2qXtoT8BMKha6UyNbxWGWl/9Jfk= 
+github.com/likexian/gokit v0.0.0-20190418170008-ace88ad0983b/go.mod h1:KKqSnk/VVSW8kEyO2vVCXoanzEutKdlBAPohmGXkxCk= +github.com/likexian/gokit v0.0.0-20190501133040-e77ea8b19cdc/go.mod h1:3kvONayqCaj+UgrRZGpgfXzHdMYCAO0KAt4/8n0L57Y= +github.com/likexian/gokit v0.20.15/go.mod h1:kn+nTv3tqh6yhor9BC4Lfiu58SmH8NmQ2PmEl+uM6nU= +github.com/likexian/simplejson-go v0.0.0-20190409170913-40473a74d76d/go.mod h1:Typ1BfnATYtZ/+/shXfFYLrovhFyuKvzwrdOnIDHlmg= +github.com/likexian/simplejson-go v0.0.0-20190419151922-c1f9f0b4f084/go.mod h1:U4O1vIJvIKwbMZKUJ62lppfdvkCdVd2nfMimHK81eec= +github.com/likexian/simplejson-go v0.0.0-20190502021454-d8787b4bfa0b/go.mod h1:3BWwtmKP9cXWwYCr5bkoVDEfLywacOv0s06OBEDpyt8= +github.com/lusis/go-artifactory v0.0.0-20160115162124-7e4ce345df82/go.mod h1:y54tfGmO3NKssKveTEFFzH8C/akrSOy/iW9qEAUDV84= +github.com/masterzen/simplexml v0.0.0-20160608183007-4572e39b1ab9/go.mod h1:kCEbxUJlNDEBNbdQMkPSp6yaKcRXVI6f4ddk8Riv4bc= +github.com/masterzen/winrm v0.0.0-20190223112901-5e5c9a7fe54b/go.mod h1:wr1VqkwW0AB5JS0QLy5GpVMS9E3VtRoSYXUYyVk46KY= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.1 h1:G1f5SKeVxmagw/IyvzvtZE4Gybcc4Tr1tf7I8z0XgOg= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= @@ -135,6 +245,9 @@ github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNx github.com/mattn/go-isatty v0.0.5 h1:tHXDdz1cpzGaovsTB+TVB8q90WEokoVmfMqoVcrLUgw= github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-shellwords v1.0.4/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.0.8/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/mitchellh/cli v1.0.0 h1:iGBIsUe3+HZ/AD/Vd7DErOt5sU9fa8Uj7A2s1aggv1Y= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= @@ -144,43 +257,85 @@ github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFW github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-linereader v0.0.0-20190213213312-1b945b3263eb/go.mod h1:OaY7UOoTkkrX3wRwjpYRKafIkkyeD0UtweSHAWWiqQM= github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/hashstructure v1.0.0/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1DutKwClXU/ABz6AQ= github.com/mitchellh/mapstructure v1.1.2 
h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY= +github.com/mitchellh/panicwrap v1.0.0/go.mod h1:pKvZHwWrZowLUzftuFq7coarnxbBXU4aQh3N0BJOeeA= +github.com/mitchellh/prefixedio v0.0.0-20190213213902-5733675afd51/go.mod h1:kB1naBgV9ORnkiTVeyJOI1DavaJkG4oNIq0Af6ZVKUo= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.1 h1:FVzMWA5RllMAKIdUSC8mdWo3XtwoecrH79BY70sEEpE= github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/mozillazg/go-httpheader v0.2.1/go.mod h1:jJ8xECTlalr6ValeXYdOF8fFUISeBAdw6E61aqQma60= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U= github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/packer-community/winrmcp v0.0.0-20180102160824-81144009af58/go.mod h1:f6Izs6JvFTdnRbziASagjZ2vmf55NSIkC/weStxCHqk= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.1 h1:LrvDIY//XNo65Lq84G/akBuMGlawHvGBABv8f/ZN6DI= github.com/posener/complete v1.2.1/go.mod h1:6gapUrK/U1TAN7ciCoNRIdVC5sbdBTUh1DKN0g6uH7E= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod 
h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/shurcooL/githubv4 v0.0.0-20191127044304-8f68eb5628d0 h1:T9uus1QvcPgeLShS30YOnnzk3r9Vvygp45muhlrufgY= +github.com/shurcooL/githubv4 v0.0.0-20191127044304-8f68eb5628d0/go.mod h1:hAF0iLZy4td2EX+/8Tw+4nodhlMrwN3HupfaXj3zkGo= +github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk= +github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/spf13/afero v1.2.1/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/svanharmelen/jsonapi v0.0.0-20180618144545-0c0828c3f16d/go.mod h1:BSTlc8jOjh0niykqEGVXOLXdi9o0r0kR8tCYiMvjFgw= +github.com/tencentcloud/tencentcloud-sdk-go v3.0.82+incompatible/go.mod h1:0PfYow01SHPMhKY31xa+EFz2RStxIqj6JFAJS+IkCi4= +github.com/tencentyun/cos-go-sdk-v5 v0.0.0-20190808065407-f07404cefc8c/go.mod h1:wk2XFUg6egk4tSDNZtXeKfe2G6690UVyt163PuUxBZk= +github.com/terraform-providers/terraform-provider-openstack v1.15.0/go.mod h1:2aQ6n/BtChAl1y2S60vebhyJyZXBsuAI5G4+lHrT1Ew= +github.com/tmc/grpc-websocket-proxy v0.0.0-20171017195756-830351dc03c6/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/ugorji/go v0.0.0-20180813092308-00b869d2f4a5/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ= github.com/ulikunitz/xz v0.5.5 h1:pFrO0lVpTBXLpYw+pnLj6TbvHuyjXMfjGeCwSqCVwok= github.com/ulikunitz/xz v0.5.5/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack v4.0.1+incompatible h1:RMF1enSPeKTlXrXdOcqjFUElywVZjjC6pqse21bKbEU= github.com/vmihailenco/msgpack v4.0.1+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= +github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xlab/treeprint v0.0.0-20161029104018-1d6e34225557/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= github.com/zclconf/go-cty v1.0.0/go.mod 
h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= +github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= github.com/zclconf/go-cty v1.2.1 h1:vGMsygfmeCl4Xb6OA5U5XVAaQZ69FvoG7X2jUtQujb8= github.com/zclconf/go-cty v1.2.1/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= github.com/zclconf/go-cty-yaml v1.0.1 h1:up11wlgAaDvlAGENcFDnZgkn0qUJurso7k6EpURKNF8= @@ -188,10 +343,16 @@ github.com/zclconf/go-cty-yaml v1.0.1/go.mod h1:IP3Ylp0wQpYm50IHK8OZWKMu6sPJIUgK go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0 h1:C9hSCOW830chIVkdja34wa6Ky+IzWllkUinR+BtRZd4= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190222235706-ffb98f73852f/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586 h1:7KByu05hhLed2MO29w7p1XfZvZ13m8mub3shuVftRs0= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d h1:1ZiEyfaQIg3Qh0EoqpwAakHVhecoE5wlSg5GjnafJGw= golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= @@ -207,7 +368,10 @@ golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -229,8 +393,12 @@ golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJ 
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -279,13 +447,19 @@ google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98 google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1 h1:j6XxA85m/6txkUCHvzlV5f+HBNl/1r5cZ2A/3IEFOO8= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0 h1:AzbTB6ux+okLTzP8Ru1Xs41C303zdcfEht7MQnYJt5A= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/website/docs/index.html.markdown b/website/docs/index.html.markdown index caddd15866..e018bb4cb9 100644 --- a/website/docs/index.html.markdown +++ b/website/docs/index.html.markdown @@ -44,8 +44,8 @@ The following arguments are supported in the `provider` block: * `base_url` - (Optional) This is the 
target GitHub base API endpoint. Providing a value is a requirement when working with GitHub Enterprise. It is optional to provide this value and - it can also be sourced from the `GITHUB_BASE_URL` environment variable. The value must end with a slash, - and generally includes the API version, for instance `https://github.someorg.example/api/v3/`. + it can also be sourced from the `GITHUB_BASE_URL` environment variable. The value must end with a slash. + `https://github.someorg.example/api/`. * `insecure` - (Optional) Whether server should be accessed without verifying the TLS certificate. As the name suggests **this is insecure** and should not be used beyond experiments, From 1815645230282b35ad651ab50fc610f2af5e1c87 Mon Sep 17 00:00:00 2001 From: Patrick Marabeas Date: Fri, 31 Jan 2020 09:26:52 +1100 Subject: [PATCH 2/4] Update client usage to v3client --- github/data_source_github_actions_public_key.go | 2 +- github/data_source_github_collaborators.go | 2 +- github/data_source_github_ip_ranges.go | 2 +- github/data_source_github_membership.go | 2 +- github/data_source_github_release.go | 2 +- github/data_source_github_repositories.go | 2 +- github/data_source_github_repository.go | 2 +- github/data_source_github_team.go | 2 +- github/data_source_github_user.go | 2 +- github/resource_github_actions_secret.go | 8 ++++---- github/resource_github_actions_secret_test.go | 4 ++-- github/resource_github_branch_protection.go | 12 ++++++------ github/resource_github_branch_protection_test.go | 4 ++-- github/resource_github_issue_label.go | 6 +++--- github/resource_github_issue_label_test.go | 4 ++-- github/resource_github_membership.go | 6 +++--- github/resource_github_membership_test.go | 6 +++--- github/resource_github_organization_project.go | 8 ++++---- github/resource_github_organization_project_test.go | 4 ++-- github/resource_github_organization_webhook.go | 8 ++++---- github/resource_github_organization_webhook_test.go | 4 ++-- github/resource_github_project_column.go | 8 ++++---- github/resource_github_project_column_test.go | 4 ++-- github/resource_github_repository.go | 8 ++++---- github/resource_github_repository_collaborator.go | 6 +++--- .../resource_github_repository_collaborator_test.go | 8 ++++---- github/resource_github_repository_deploy_key.go | 6 +++--- github/resource_github_repository_deploy_key_test.go | 4 ++-- github/resource_github_repository_file.go | 10 +++++----- github/resource_github_repository_file_test.go | 6 +++--- github/resource_github_repository_project.go | 8 ++++---- github/resource_github_repository_project_test.go | 4 ++-- github/resource_github_repository_test.go | 12 +++++++----- github/resource_github_repository_webhook.go | 8 ++++---- github/resource_github_repository_webhook_test.go | 4 ++-- github/resource_github_team.go | 8 ++++---- github/resource_github_team_membership.go | 6 +++--- github/resource_github_team_membership_test.go | 6 +++--- github/resource_github_team_repository.go | 8 ++++---- github/resource_github_team_repository_test.go | 4 ++-- github/resource_github_team_test.go | 4 ++-- github/resource_github_user_gpg_key.go | 6 +++--- github/resource_github_user_gpg_key_test.go | 4 ++-- github/resource_github_user_invitation_accepter.go | 2 +- github/resource_github_user_ssh_key.go | 6 +++--- github/resource_github_user_ssh_key_test.go | 4 ++-- github/resource_organization_block.go | 6 +++--- github/resource_organization_block_test.go | 4 ++-- github/sweeper_test.go | 2 +- 49 files changed, 130 insertions(+), 128 deletions(-) diff --git 
a/github/data_source_github_actions_public_key.go b/github/data_source_github_actions_public_key.go index 6cb9e32931..fae9c13b89 100644 --- a/github/data_source_github_actions_public_key.go +++ b/github/data_source_github_actions_public_key.go @@ -38,7 +38,7 @@ func dataSourceGithubActionsPublicKeyRead(d *schema.ResourceData, meta interface owner := meta.(*Organization).name log.Printf("[INFO] Refreshing GitHub Actions Public Key from: %s/%s", owner, repository) - client := meta.(*Organization).client + client := meta.(*Organization).v3client ctx := context.Background() publicKey, _, err := client.Actions.GetPublicKey(ctx, owner, repository) diff --git a/github/data_source_github_collaborators.go b/github/data_source_github_collaborators.go index dd96347fcb..282231c6f2 100644 --- a/github/data_source_github_collaborators.go +++ b/github/data_source_github_collaborators.go @@ -110,7 +110,7 @@ func dataSourceGithubCollaborators() *schema.Resource { func dataSourceGithubCollaboratorsRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client ctx := context.Background() owner := d.Get("owner").(string) diff --git a/github/data_source_github_ip_ranges.go b/github/data_source_github_ip_ranges.go index e8e98f390b..7449571e18 100644 --- a/github/data_source_github_ip_ranges.go +++ b/github/data_source_github_ip_ranges.go @@ -36,7 +36,7 @@ func dataSourceGithubIpRanges() *schema.Resource { func dataSourceGithubIpRangesRead(d *schema.ResourceData, meta interface{}) error { org := meta.(*Organization) - api, _, err := org.client.APIMeta(org.StopContext) + api, _, err := org.v3client.APIMeta(org.StopContext) if err != nil { return err } diff --git a/github/data_source_github_membership.go b/github/data_source_github_membership.go index b2cc4c6e8b..ddd1e17d94 100644 --- a/github/data_source_github_membership.go +++ b/github/data_source_github_membership.go @@ -32,7 +32,7 @@ func dataSourceGithubMembershipRead(d *schema.ResourceData, meta interface{}) er username := d.Get("username").(string) log.Printf("[INFO] Refreshing GitHub membership: %s", username) - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name ctx := context.Background() diff --git a/github/data_source_github_release.go b/github/data_source_github_release.go index 83d72de182..fd3c452e9b 100644 --- a/github/data_source_github_release.go +++ b/github/data_source_github_release.go @@ -102,7 +102,7 @@ func dataSourceGithubReleaseRead(d *schema.ResourceData, meta interface{}) error repository := d.Get("repository").(string) owner := d.Get("owner").(string) - client := meta.(*Organization).client + client := meta.(*Organization).v3client ctx := context.Background() var err error diff --git a/github/data_source_github_repositories.go b/github/data_source_github_repositories.go index c6605b6a18..99238a3e85 100644 --- a/github/data_source_github_repositories.go +++ b/github/data_source_github_repositories.go @@ -48,7 +48,7 @@ func dataSourceGithubRepositoriesRead(d *schema.ResourceData, meta interface{}) return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client query := d.Get("query").(string) opt := &github.SearchOptions{ diff --git a/github/data_source_github_repository.go b/github/data_source_github_repository.go index 74bc7d31d5..2ed4566a1d 100644 --- a/github/data_source_github_repository.go +++ b/github/data_source_github_repository.go @@ -112,7 +112,7 @@ func 
dataSourceGithubRepositoryRead(d *schema.ResourceData, meta interface{}) er return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name var repoName string diff --git a/github/data_source_github_team.go b/github/data_source_github_team.go index 803e83bd5b..0d93d65b16 100644 --- a/github/data_source_github_team.go +++ b/github/data_source_github_team.go @@ -52,7 +52,7 @@ func dataSourceGithubTeamRead(d *schema.ResourceData, meta interface{}) error { slug := d.Get("slug").(string) log.Printf("[INFO] Refreshing GitHub Team: %s", slug) - client := meta.(*Organization).client + client := meta.(*Organization).v3client ctx := context.Background() team, err := getGithubTeamBySlug(ctx, client, meta.(*Organization).name, slug) diff --git a/github/data_source_github_user.go b/github/data_source_github_user.go index c22c96e1ef..dad0bd109a 100644 --- a/github/data_source_github_user.go +++ b/github/data_source_github_user.go @@ -103,7 +103,7 @@ func dataSourceGithubUserRead(d *schema.ResourceData, meta interface{}) error { username := d.Get("username").(string) log.Printf("[INFO] Refreshing GitHub User: %s", username) - client := meta.(*Organization).client + client := meta.(*Organization).v3client ctx := context.Background() user, _, err := client.Users.Get(ctx, username) diff --git a/github/resource_github_actions_secret.go b/github/resource_github_actions_secret.go index 94dcee3786..c1490d5b5f 100644 --- a/github/resource_github_actions_secret.go +++ b/github/resource_github_actions_secret.go @@ -50,7 +50,7 @@ func resourceGithubActionsSecretCreateOrUpdate(d *schema.ResourceData, meta inte return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client owner := meta.(*Organization).name ctx := context.Background() @@ -90,7 +90,7 @@ func resourceGithubActionsSecretRead(d *schema.ResourceData, meta interface{}) e return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client owner := meta.(*Organization).name ctx := context.Background() @@ -118,7 +118,7 @@ func resourceGithubActionsSecretDelete(d *schema.ResourceData, meta interface{}) return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name ctx := context.WithValue(context.Background(), ctxId, d.Id()) @@ -134,7 +134,7 @@ func resourceGithubActionsSecretDelete(d *schema.ResourceData, meta interface{}) } func getPublicKeyDetails(owner, repository string, meta interface{}) (keyId, pkValue string, err error) { - client := meta.(*Organization).client + client := meta.(*Organization).v3client ctx := context.Background() publicKey, _, err := client.Actions.GetPublicKey(ctx, owner, repository) diff --git a/github/resource_github_actions_secret_test.go b/github/resource_github_actions_secret_test.go index 00c1c79271..47b9468a42 100644 --- a/github/resource_github_actions_secret_test.go +++ b/github/resource_github_actions_secret_test.go @@ -77,7 +77,7 @@ func testAccCheckGithubActionsSecretExists(resourceName, secretName string, t *t } org := testAccProvider.Meta().(*Organization) - conn := org.client + conn := org.v3client _, _, err := conn.Actions.GetSecret(context.TODO(), org.name, repoName, secretName) if err != nil { t.Log("Failed to get secret") @@ -89,7 +89,7 @@ func testAccCheckGithubActionsSecretExists(resourceName, secretName string, t *t } func testAccCheckGithubActionsSecretDestroy(s *terraform.State) error { - client := 
testAccProvider.Meta().(*Organization).client + client := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_actions_secret" { diff --git a/github/resource_github_branch_protection.go b/github/resource_github_branch_protection.go index 70bbff0de2..cccd9493fc 100644 --- a/github/resource_github_branch_protection.go +++ b/github/resource_github_branch_protection.go @@ -155,7 +155,7 @@ func resourceGithubBranchProtectionCreate(d *schema.ResourceData, meta interface return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName := d.Get("repository").(string) @@ -198,7 +198,7 @@ func resourceGithubBranchProtectionRead(d *schema.ResourceData, meta interface{} return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client repoName, branch, err := parseTwoPartID(d.Id(), "repository", "branch") if err != nil { @@ -264,7 +264,7 @@ func resourceGithubBranchProtectionUpdate(d *schema.ResourceData, meta interface return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client repoName, branch, err := parseTwoPartID(d.Id(), "repository", "branch") if err != nil { return err @@ -320,7 +320,7 @@ func resourceGithubBranchProtectionDelete(d *schema.ResourceData, meta interface return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client repoName, branch, err := parseTwoPartID(d.Id(), "repository", "branch") if err != nil { return err @@ -381,7 +381,7 @@ func flattenAndSetRequiredStatusChecks(d *schema.ResourceData, protection *githu } func requireSignedCommitsRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client repoName, branch, err := parseTwoPartID(d.Id(), "repository", "branch") if err != nil { @@ -407,7 +407,7 @@ func requireSignedCommitsRead(d *schema.ResourceData, meta interface{}) error { func requireSignedCommitsUpdate(d *schema.ResourceData, meta interface{}) (err error) { requiredSignedCommit := d.Get("require_signed_commits").(bool) - client := meta.(*Organization).client + client := meta.(*Organization).v3client repoName, branch, err := parseTwoPartID(d.Id(), "repository", "branch") if err != nil { diff --git a/github/resource_github_branch_protection_test.go b/github/resource_github_branch_protection_test.go index e5f2b6051c..1cca573e50 100644 --- a/github/resource_github_branch_protection_test.go +++ b/github/resource_github_branch_protection_test.go @@ -229,7 +229,7 @@ func testAccCheckGithubProtectedBranchExists(n, id string, protection *github.Pr return fmt.Errorf("Expected ID to be %v, got %v", id, rs.Primary.ID) } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client o := testAccProvider.Meta().(*Organization).name r, b, err := parseTwoPartID(rs.Primary.ID, "repository", "branch") if err != nil { @@ -356,7 +356,7 @@ func testAccCheckGithubBranchProtectionNoPullRequestReviewsExist(protection *git } func testAccGithubBranchProtectionDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_branch_protection" { diff --git a/github/resource_github_issue_label.go b/github/resource_github_issue_label.go index 5f4fc5cf57..7e30e769ec 100644 --- 
a/github/resource_github_issue_label.go +++ b/github/resource_github_issue_label.go @@ -65,7 +65,7 @@ func resourceGithubIssueLabelCreateOrUpdate(d *schema.ResourceData, meta interfa return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName := d.Get("repository").(string) name := d.Get("name").(string) @@ -153,7 +153,7 @@ func resourceGithubIssueLabelRead(d *schema.ResourceData, meta interface{}) erro return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client repoName, name, err := parseTwoPartID(d.Id(), "repository", "name") if err != nil { return err @@ -199,7 +199,7 @@ func resourceGithubIssueLabelDelete(d *schema.ResourceData, meta interface{}) er return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName := d.Get("repository").(string) diff --git a/github/resource_github_issue_label_test.go b/github/resource_github_issue_label_test.go index 557777cc16..56da05775f 100644 --- a/github/resource_github_issue_label_test.go +++ b/github/resource_github_issue_label_test.go @@ -137,7 +137,7 @@ func testAccCheckGithubIssueLabelExists(n string, label *github.Label) resource. return fmt.Errorf("No issue label ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName := testAccProvider.Meta().(*Organization).name repoName, name, err := parseTwoPartID(rs.Primary.ID, "repository", "name") if err != nil { @@ -179,7 +179,7 @@ func testAccCheckGithubIssueLabelIDUnchanged(label, updatedLabel *github.Label) } func testAccGithubIssueLabelDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_issue_label" { diff --git a/github/resource_github_membership.go b/github/resource_github_membership.go index 6024fa2173..659a997a5b 100644 --- a/github/resource_github_membership.go +++ b/github/resource_github_membership.go @@ -46,7 +46,7 @@ func resourceGithubMembershipCreateOrUpdate(d *schema.ResourceData, meta interfa return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name username := d.Get("username").(string) @@ -79,7 +79,7 @@ func resourceGithubMembershipRead(d *schema.ResourceData, meta interface{}) erro return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name _, username, err := parseTwoPartID(d.Id(), "organization", "username") @@ -122,7 +122,7 @@ func resourceGithubMembershipDelete(d *schema.ResourceData, meta interface{}) er return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name ctx := context.WithValue(context.Background(), ctxId, d.Id()) diff --git a/github/resource_github_membership_test.go b/github/resource_github_membership_test.go index 4ccda28296..3eae10fb5c 100644 --- a/github/resource_github_membership_test.go +++ b/github/resource_github_membership_test.go @@ -84,7 +84,7 @@ func TestAccGithubMembership_caseInsensitive(t *testing.T) { } func testAccCheckGithubMembershipDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range 
s.RootModule().Resources { if rs.Type != "github_membership" { @@ -122,7 +122,7 @@ func testAccCheckGithubMembershipExists(n string, membership *github.Membership) return fmt.Errorf("No membership ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName, username, err := parseTwoPartID(rs.Primary.ID, "organization", "username") if err != nil { return err @@ -148,7 +148,7 @@ func testAccCheckGithubMembershipRoleState(n string, membership *github.Membersh return fmt.Errorf("No membership ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName, username, err := parseTwoPartID(rs.Primary.ID, "organization", "username") if err != nil { return err diff --git a/github/resource_github_organization_project.go b/github/resource_github_organization_project.go index 53627386bc..968fc14a13 100644 --- a/github/resource_github_organization_project.go +++ b/github/resource_github_organization_project.go @@ -48,7 +48,7 @@ func resourceGithubOrganizationProjectCreate(d *schema.ResourceData, meta interf return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name name := d.Get("name").(string) body := d.Get("body").(string) @@ -76,7 +76,7 @@ func resourceGithubOrganizationProjectRead(d *schema.ResourceData, meta interfac return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name projectID, err := strconv.ParseInt(d.Id(), 10, 64) @@ -120,7 +120,7 @@ func resourceGithubOrganizationProjectUpdate(d *schema.ResourceData, meta interf return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name name := d.Get("name").(string) @@ -151,7 +151,7 @@ func resourceGithubOrganizationProjectDelete(d *schema.ResourceData, meta interf return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name projectID, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { diff --git a/github/resource_github_organization_project_test.go b/github/resource_github_organization_project_test.go index efce23b7c1..e42484305b 100644 --- a/github/resource_github_organization_project_test.go +++ b/github/resource_github_organization_project_test.go @@ -42,7 +42,7 @@ func TestAccGithubOrganizationProject_basic(t *testing.T) { } func testAccGithubOrganizationProjectDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_organization_project" { @@ -81,7 +81,7 @@ func testAccCheckGithubOrganizationProjectExists(n string, project *github.Proje return err } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client gotProject, _, err := conn.Projects.GetProject(context.TODO(), projectID) if err != nil { return err diff --git a/github/resource_github_organization_webhook.go b/github/resource_github_organization_webhook.go index 062d561b72..16099848e6 100644 --- a/github/resource_github_organization_webhook.go +++ b/github/resource_github_organization_webhook.go @@ -78,7 +78,7 @@ func resourceGithubOrganizationWebhookCreate(d *schema.ResourceData, meta interf return err } - client := 
meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name webhookObj := resourceGithubOrganizationWebhookObject(d) @@ -109,7 +109,7 @@ func resourceGithubOrganizationWebhookRead(d *schema.ResourceData, meta interfac return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name hookID, err := strconv.ParseInt(d.Id(), 10, 64) @@ -166,7 +166,7 @@ func resourceGithubOrganizationWebhookUpdate(d *schema.ResourceData, meta interf return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name webhookObj := resourceGithubOrganizationWebhookObject(d) @@ -193,7 +193,7 @@ func resourceGithubOrganizationWebhookDelete(d *schema.ResourceData, meta interf return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name hookID, err := strconv.ParseInt(d.Id(), 10, 64) diff --git a/github/resource_github_organization_webhook_test.go b/github/resource_github_organization_webhook_test.go index 0a3d2a1d4a..cede776244 100644 --- a/github/resource_github_organization_webhook_test.go +++ b/github/resource_github_organization_webhook_test.go @@ -92,7 +92,7 @@ func testAccCheckGithubOrganizationWebhookExists(n string, hook *github.Hook) re } org := testAccProvider.Meta().(*Organization) - conn := org.client + conn := org.v3client getHook, _, err := conn.Organizations.GetHook(context.TODO(), org.name, hookID) if err != nil { return err @@ -144,7 +144,7 @@ func testAccCheckGithubOrganizationWebhookSecret(r, secret string) resource.Test } func testAccCheckGithubOrganizationWebhookDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName := testAccProvider.Meta().(*Organization).name for _, rs := range s.RootModule().Resources { diff --git a/github/resource_github_project_column.go b/github/resource_github_project_column.go index b69f01706c..281435a953 100644 --- a/github/resource_github_project_column.go +++ b/github/resource_github_project_column.go @@ -45,7 +45,7 @@ func resourceGithubProjectColumnCreate(d *schema.ResourceData, meta interface{}) return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client options := github.ProjectColumnOptions{ Name: d.Get("name").(string), @@ -73,7 +73,7 @@ func resourceGithubProjectColumnCreate(d *schema.ResourceData, meta interface{}) } func resourceGithubProjectColumnRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client columnID, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { @@ -106,7 +106,7 @@ func resourceGithubProjectColumnRead(d *schema.ResourceData, meta interface{}) e } func resourceGithubProjectColumnUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client options := github.ProjectColumnOptions{ Name: d.Get("name").(string), @@ -128,7 +128,7 @@ func resourceGithubProjectColumnUpdate(d *schema.ResourceData, meta interface{}) } func resourceGithubProjectColumnDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client columnID, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { diff --git a/github/resource_github_project_column_test.go 
b/github/resource_github_project_column_test.go index 800171e0ec..af23fc5838 100644 --- a/github/resource_github_project_column_test.go +++ b/github/resource_github_project_column_test.go @@ -49,7 +49,7 @@ func TestAccGithubProjectColumn_basic(t *testing.T) { } func testAccGithubProjectColumnDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_project_column" { @@ -87,7 +87,7 @@ func testAccCheckGithubProjectColumnExists(n string, project *github.ProjectColu return err } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client gotColumn, _, err := conn.Projects.GetProjectColumn(context.TODO(), columnID) if err != nil { return err diff --git a/github/resource_github_repository.go b/github/resource_github_repository.go index ae26797079..fd82db0848 100644 --- a/github/resource_github_repository.go +++ b/github/resource_github_repository.go @@ -202,7 +202,7 @@ func resourceGithubRepositoryCreate(d *schema.ResourceData, meta interface{}) er return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client if branchName, hasDefaultBranch := d.GetOk("default_branch"); hasDefaultBranch && (branchName != "master") { return fmt.Errorf("Cannot set the default branch on a new repository to something other than 'master'.") @@ -271,7 +271,7 @@ func resourceGithubRepositoryRead(d *schema.ResourceData, meta interface{}) erro return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName := d.Id() @@ -343,7 +343,7 @@ func resourceGithubRepositoryUpdate(d *schema.ResourceData, meta interface{}) er return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client repoReq := resourceGithubRepositoryObject(d) // Can only set `default_branch` on an already created repository with the target branches ref already in-place @@ -383,7 +383,7 @@ func resourceGithubRepositoryDelete(d *schema.ResourceData, meta interface{}) er return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client repoName := d.Id() orgName := meta.(*Organization).name ctx := context.WithValue(context.Background(), ctxId, d.Id()) diff --git a/github/resource_github_repository_collaborator.go b/github/resource_github_repository_collaborator.go index 49357a9987..91f7c1922f 100644 --- a/github/resource_github_repository_collaborator.go +++ b/github/resource_github_repository_collaborator.go @@ -54,7 +54,7 @@ func resourceGithubRepositoryCollaboratorCreate(d *schema.ResourceData, meta int return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name username := d.Get("username").(string) @@ -86,7 +86,7 @@ func resourceGithubRepositoryCollaboratorRead(d *schema.ResourceData, meta inter return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName, username, err := parseTwoPartID(d.Id(), "repository", "username") @@ -174,7 +174,7 @@ func resourceGithubRepositoryCollaboratorDelete(d *schema.ResourceData, meta int return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name username := d.Get("username").(string) diff --git 
a/github/resource_github_repository_collaborator_test.go b/github/resource_github_repository_collaborator_test.go index 93ed3bf0f6..dec22abbcf 100644 --- a/github/resource_github_repository_collaborator_test.go +++ b/github/resource_github_repository_collaborator_test.go @@ -93,7 +93,7 @@ func TestAccGithubRepositoryCollaborator_caseInsensitive(t *testing.T) { } func testAccCheckGithubRepositoryCollaboratorDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_repository_collaborator" { @@ -133,7 +133,7 @@ func testAccCheckGithubRepositoryCollaboratorExists(n string) resource.TestCheck return fmt.Errorf("No membership ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName := testAccProvider.Meta().(*Organization).name repoName, username, err := parseTwoPartID(rs.Primary.ID, "repository", "username") if err != nil { @@ -173,7 +173,7 @@ func testAccCheckGithubRepositoryCollaboratorPermission(n string) resource.TestC return fmt.Errorf("No membership ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName := testAccProvider.Meta().(*Organization).name repoName, username, err := parseTwoPartID(rs.Primary.ID, "repository", "username") if err != nil { @@ -224,7 +224,7 @@ func testAccCheckGithubRepositoryCollaboratorInvited(repoName, username string, return func(s *terraform.State) error { opt := &github.ListOptions{PerPage: maxPerPage} - client := testAccProvider.Meta().(*Organization).client + client := testAccProvider.Meta().(*Organization).v3client org := testAccProvider.Meta().(*Organization).name for { diff --git a/github/resource_github_repository_deploy_key.go b/github/resource_github_repository_deploy_key.go index bc276968c3..da03dcb625 100644 --- a/github/resource_github_repository_deploy_key.go +++ b/github/resource_github_repository_deploy_key.go @@ -59,7 +59,7 @@ func resourceGithubRepositoryDeployKeyCreate(d *schema.ResourceData, meta interf return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client repoName := d.Get("repository").(string) key := d.Get("key").(string) @@ -92,7 +92,7 @@ func resourceGithubRepositoryDeployKeyRead(d *schema.ResourceData, meta interfac return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client owner := meta.(*Organization).name repoName, idString, err := parseTwoPartID(d.Id(), "repository", "ID") @@ -141,7 +141,7 @@ func resourceGithubRepositoryDeployKeyDelete(d *schema.ResourceData, meta interf return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client owner := meta.(*Organization).name repoName, idString, err := parseTwoPartID(d.Id(), "repository", "ID") diff --git a/github/resource_github_repository_deploy_key_test.go b/github/resource_github_repository_deploy_key_test.go index c31d4ded0c..fbb7a4d4ed 100644 --- a/github/resource_github_repository_deploy_key_test.go +++ b/github/resource_github_repository_deploy_key_test.go @@ -81,7 +81,7 @@ func TestAccGithubRepositoryDeployKey_basic(t *testing.T) { } func testAccCheckGithubRepositoryDeployKeyDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range 
s.RootModule().Resources { if rs.Type != "github_repository_deploy_key" { @@ -121,7 +121,7 @@ func testAccCheckGithubRepositoryDeployKeyExists(n string) resource.TestCheckFun return fmt.Errorf("No membership ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName := testAccProvider.Meta().(*Organization).name repoName, idString, err := parseTwoPartID(rs.Primary.ID, "repository", "ID") if err != nil { diff --git a/github/resource_github_repository_file.go b/github/resource_github_repository_file.go index eb819e45fa..2d0287bc36 100644 --- a/github/resource_github_repository_file.go +++ b/github/resource_github_repository_file.go @@ -31,7 +31,7 @@ func resourceGithubRepositoryFile() *schema.Resource { branch = parts[1] } - client := meta.(*Organization).client + client := meta.(*Organization).v3client org := meta.(*Organization).name repo, file := splitRepoFilePath(parts[0]) if err := checkRepositoryFileExists(client, org, repo, file, branch); err != nil { @@ -139,7 +139,7 @@ func resourceGithubRepositoryFileCreate(d *schema.ResourceData, meta interface{} return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client org := meta.(*Organization).name ctx := context.Background() @@ -178,7 +178,7 @@ func resourceGithubRepositoryFileRead(d *schema.ResourceData, meta interface{}) return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client org := meta.(*Organization).name ctx := context.WithValue(context.Background(), ctxId, d.Id()) @@ -227,7 +227,7 @@ func resourceGithubRepositoryFileUpdate(d *schema.ResourceData, meta interface{} return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client org := meta.(*Organization).name ctx := context.Background() @@ -263,7 +263,7 @@ func resourceGithubRepositoryFileDelete(d *schema.ResourceData, meta interface{} return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client org := meta.(*Organization).name ctx := context.Background() diff --git a/github/resource_github_repository_file_test.go b/github/resource_github_repository_file_test.go index c980a296a1..191b90e377 100644 --- a/github/resource_github_repository_file_test.go +++ b/github/resource_github_repository_file_test.go @@ -48,7 +48,7 @@ func testSweepRepositoryFiles(region string) error { } func testSweepDeleteRepositoryFiles(meta interface{}, branch string) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client org := meta.(*Organization).name _, files, _, err := client.Repositories.GetContents( @@ -309,7 +309,7 @@ func testAccCheckGithubRepositoryFileExists(n, path, branch string, content *git return fmt.Errorf("No repository file path set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client org := testAccProvider.Meta().(*Organization).name opts := &github.RepositoryContentGetOptions{Ref: branch} @@ -382,7 +382,7 @@ func testAccCheckGithubRepositoryFileCommitAttributes(commit *github.RepositoryC } func testAccCheckGithubRepositoryFileDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client org := testAccProvider.Meta().(*Organization).name for _, rs := range s.RootModule().Resources { diff --git a/github/resource_github_repository_project.go b/github/resource_github_repository_project.go index 
af468e63be..cb2ad9fbc1 100644 --- a/github/resource_github_repository_project.go +++ b/github/resource_github_repository_project.go @@ -62,7 +62,7 @@ func resourceGithubRepositoryProjectCreate(d *schema.ResourceData, meta interfac return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName := d.Get("repository").(string) @@ -92,7 +92,7 @@ func resourceGithubRepositoryProjectRead(d *schema.ResourceData, meta interface{ return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name projectID, err := strconv.ParseInt(d.Id(), 10, 64) @@ -131,7 +131,7 @@ func resourceGithubRepositoryProjectRead(d *schema.ResourceData, meta interface{ } func resourceGithubRepositoryProjectUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client name := d.Get("name").(string) body := d.Get("body").(string) @@ -157,7 +157,7 @@ func resourceGithubRepositoryProjectUpdate(d *schema.ResourceData, meta interfac } func resourceGithubRepositoryProjectDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client projectID, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { diff --git a/github/resource_github_repository_project_test.go b/github/resource_github_repository_project_test.go index c26480f422..b53e25a478 100644 --- a/github/resource_github_repository_project_test.go +++ b/github/resource_github_repository_project_test.go @@ -47,7 +47,7 @@ func TestAccGithubRepositoryProject_basic(t *testing.T) { } func testAccGithubRepositoryProjectDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_repository_project" { @@ -86,7 +86,7 @@ func testAccCheckGithubRepositoryProjectExists(n string, project *github.Project return err } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client gotProject, _, err := conn.Projects.GetProject(context.TODO(), projectID) if err != nil { return err diff --git a/github/resource_github_repository_test.go b/github/resource_github_repository_test.go index 960004df12..cf012914d9 100644 --- a/github/resource_github_repository_test.go +++ b/github/resource_github_repository_test.go @@ -30,7 +30,7 @@ func testSweepRepositories(region string) error { return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client repos, _, err := client.Repositories.List(context.TODO(), meta.(*Organization).name, nil) if err != nil { @@ -536,7 +536,7 @@ func testAccCheckGithubRepositoryExists(n string, repo *github.Repository) resou } org := testAccProvider.Meta().(*Organization) - conn := org.client + conn := org.v3client gotRepo, _, err := conn.Repositories.Get(context.TODO(), org.name, repoName) if err != nil { return err @@ -688,7 +688,7 @@ func testAccCheckGithubRepositoryAttributes(repo *github.Repository, want *testA } func testAccCheckGithubRepositoryDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName := testAccProvider.Meta().(*Organization).name for _, rs := range s.RootModule().Resources { @@ -711,19 +711,21 @@ func 
testAccCheckGithubRepositoryDestroy(s *terraform.State) error { } func testAccCreateRepositoryBranch(branch, repository string) error { + baseURL := os.Getenv("GITHUB_BASE_URL") org := os.Getenv("GITHUB_ORGANIZATION") token := os.Getenv("GITHUB_TOKEN") config := Config{ + BaseURL: baseURL, Token: token, Organization: org, } - c, err := config.Client() + c, err := config.Clients() if err != nil { return fmt.Errorf("Error creating github client: %s", err) } - client := c.(*Organization).client + client := c.(*Organization).v3client refs, _, err := client.Git.GetRefs(context.TODO(), org, repository, "heads") if err != nil { diff --git a/github/resource_github_repository_webhook.go b/github/resource_github_repository_webhook.go index f54517dd67..80e359568a 100644 --- a/github/resource_github_repository_webhook.go +++ b/github/resource_github_repository_webhook.go @@ -103,7 +103,7 @@ func resourceGithubRepositoryWebhookCreate(d *schema.ResourceData, meta interfac return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName := d.Get("repository").(string) @@ -137,7 +137,7 @@ func resourceGithubRepositoryWebhookRead(d *schema.ResourceData, meta interface{ return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName := d.Get("repository").(string) @@ -195,7 +195,7 @@ func resourceGithubRepositoryWebhookUpdate(d *schema.ResourceData, meta interfac return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName := d.Get("repository").(string) @@ -216,7 +216,7 @@ func resourceGithubRepositoryWebhookUpdate(d *schema.ResourceData, meta interfac } func resourceGithubRepositoryWebhookDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name repoName := d.Get("repository").(string) diff --git a/github/resource_github_repository_webhook_test.go b/github/resource_github_repository_webhook_test.go index 7fee841087..a509413c0b 100644 --- a/github/resource_github_repository_webhook_test.go +++ b/github/resource_github_repository_webhook_test.go @@ -117,7 +117,7 @@ func testAccCheckGithubRepositoryWebhookExists(n string, repoName string, hook * } org := testAccProvider.Meta().(*Organization) - conn := org.client + conn := org.v3client getHook, _, err := conn.Repositories.GetHook(context.TODO(), org.name, repoName, hookID) if err != nil { return err @@ -154,7 +154,7 @@ func testAccCheckGithubRepositoryWebhookAttributes(hook *github.Hook, want *test } func testAccCheckGithubRepositoryWebhookDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName := testAccProvider.Meta().(*Organization).name for _, rs := range s.RootModule().Resources { diff --git a/github/resource_github_team.go b/github/resource_github_team.go index 59679e4da2..421ebea040 100644 --- a/github/resource_github_team.go +++ b/github/resource_github_team.go @@ -65,7 +65,7 @@ func resourceGithubTeamCreate(d *schema.ResourceData, meta interface{}) error { return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name name := d.Get("name").(string) @@ -107,7 +107,7 @@ func resourceGithubTeamRead(d *schema.ResourceData, meta interface{}) 
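
For reference, test helpers outside the provider's meta path now follow the same pattern as the branch helper above: build a Config (including BaseURL), call Clients(), and take the REST client from the returned *Organization. A minimal sketch, assuming the package's existing imports (fmt, os, go-github); the helper name and error text are illustrative only:

func newV3ClientFromEnv() (*github.Client, error) {
	config := Config{
		BaseURL:      os.Getenv("GITHUB_BASE_URL"),
		Token:        os.Getenv("GITHUB_TOKEN"),
		Organization: os.Getenv("GITHUB_ORGANIZATION"),
	}

	// Clients() still returns interface{}; the concrete meta type is *Organization.
	meta, err := config.Clients()
	if err != nil {
		return nil, fmt.Errorf("error creating github clients: %s", err)
	}

	// The REST client now lives on the renamed v3client field.
	return meta.(*Organization).v3client, nil
}
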
error { return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgId := meta.(*Organization).id id, err := strconv.ParseInt(d.Id(), 10, 64) @@ -158,7 +158,7 @@ func resourceGithubTeamUpdate(d *schema.ResourceData, meta interface{}) error { return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgId := meta.(*Organization).id editedTeam := github.NewTeam{ @@ -204,7 +204,7 @@ func resourceGithubTeamDelete(d *schema.ResourceData, meta interface{}) error { return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgId := meta.(*Organization).id id, err := strconv.ParseInt(d.Id(), 10, 64) diff --git a/github/resource_github_team_membership.go b/github/resource_github_team_membership.go index 3335330ac1..f2b219d9c5 100644 --- a/github/resource_github_team_membership.go +++ b/github/resource_github_team_membership.go @@ -49,7 +49,7 @@ func resourceGithubTeamMembership() *schema.Resource { } func resourceGithubTeamMembershipCreateOrUpdate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client teamIdString := d.Get("team_id").(string) teamId, err := strconv.ParseInt(teamIdString, 10, 64) @@ -79,7 +79,7 @@ func resourceGithubTeamMembershipCreateOrUpdate(d *schema.ResourceData, meta int } func resourceGithubTeamMembershipRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client teamIdString, username, err := parseTwoPartID(d.Id(), "team_id", "username") if err != nil { return err @@ -126,7 +126,7 @@ func resourceGithubTeamMembershipRead(d *schema.ResourceData, meta interface{}) } func resourceGithubTeamMembershipDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client teamIdString := d.Get("team_id").(string) teamId, err := strconv.ParseInt(teamIdString, 10, 64) diff --git a/github/resource_github_team_membership_test.go b/github/resource_github_team_membership_test.go index 7a1db32a71..92156968e4 100644 --- a/github/resource_github_team_membership_test.go +++ b/github/resource_github_team_membership_test.go @@ -96,7 +96,7 @@ func TestAccGithubTeamMembership_caseInsensitive(t *testing.T) { } func testAccCheckGithubTeamMembershipDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_team_membership" { @@ -139,7 +139,7 @@ func testAccCheckGithubTeamMembershipExists(n string, membership *github.Members return fmt.Errorf("No team membership ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client teamIdString, username, err := parseTwoPartID(rs.Primary.ID, "team_id", "username") if err != nil { return err @@ -171,7 +171,7 @@ func testAccCheckGithubTeamMembershipRoleState(n, expected string, membership *g return fmt.Errorf("No team membership ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client teamIdString, username, err := parseTwoPartID(rs.Primary.ID, "team_id", "username") if err != nil { return err diff --git a/github/resource_github_team_repository.go b/github/resource_github_team_repository.go index 38234196a6..81720b9cf5 100644 --- 
a/github/resource_github_team_repository.go +++ b/github/resource_github_team_repository.go @@ -52,7 +52,7 @@ func resourceGithubTeamRepositoryCreate(d *schema.ResourceData, meta interface{} return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgId := meta.(*Organization).id teamIdString := d.Get("team_id").(string) @@ -92,7 +92,7 @@ func resourceGithubTeamRepositoryRead(d *schema.ResourceData, meta interface{}) return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgId := meta.(*Organization).id teamIdString, repoName, err := parseTwoPartID(d.Id(), "team_id", "repository") @@ -147,7 +147,7 @@ func resourceGithubTeamRepositoryUpdate(d *schema.ResourceData, meta interface{} return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgId := meta.(*Organization).id teamIdString := d.Get("team_id").(string) @@ -187,7 +187,7 @@ func resourceGithubTeamRepositoryDelete(d *schema.ResourceData, meta interface{} return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgId := meta.(*Organization).id teamIdString := d.Get("team_id").(string) diff --git a/github/resource_github_team_repository_test.go b/github/resource_github_team_repository_test.go index 3b15a251ee..8acb9ef151 100644 --- a/github/resource_github_team_repository_test.go +++ b/github/resource_github_team_repository_test.go @@ -111,7 +111,7 @@ func testAccCheckGithubTeamRepositoryExists(n string, repository *github.Reposit return fmt.Errorf("No team repository ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client teamIdString, repoName, err := parseTwoPartID(rs.Primary.ID, "team_id", "repository") if err != nil { @@ -137,7 +137,7 @@ func testAccCheckGithubTeamRepositoryExists(n string, repository *github.Reposit } func testAccCheckGithubTeamRepositoryDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgId := testAccProvider.Meta().(*Organization).id for _, rs := range s.RootModule().Resources { diff --git a/github/resource_github_team_test.go b/github/resource_github_team_test.go index 93ba009c5c..2723c1d40c 100644 --- a/github/resource_github_team_test.go +++ b/github/resource_github_team_test.go @@ -140,7 +140,7 @@ func testAccCheckGithubTeamExists(n string, team *github.Team) resource.TestChec return fmt.Errorf("No Team ID is set") } - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client id, err := strconv.ParseInt(rs.Primary.ID, 10, 64) if err != nil { return unconvertibleIdErr(rs.Primary.ID, err) @@ -178,7 +178,7 @@ func testAccCheckGithubTeamAttributes(team *github.Team, name, description strin } func testAccCheckGithubTeamDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgId := testAccProvider.Meta().(*Organization).id for _, rs := range s.RootModule().Resources { diff --git a/github/resource_github_user_gpg_key.go b/github/resource_github_user_gpg_key.go index 750dec3e6a..2d6b84f327 100644 --- a/github/resource_github_user_gpg_key.go +++ b/github/resource_github_user_gpg_key.go @@ -35,7 +35,7 @@ func resourceGithubUserGpgKey() *schema.Resource { } func resourceGithubUserGpgKeyCreate(d *schema.ResourceData, meta interface{}) error { - client 
:= meta.(*Organization).client + client := meta.(*Organization).v3client pubKey := d.Get("armored_public_key").(string) ctx := context.Background() @@ -52,7 +52,7 @@ func resourceGithubUserGpgKeyCreate(d *schema.ResourceData, meta interface{}) er } func resourceGithubUserGpgKeyRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client id, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { @@ -86,7 +86,7 @@ func resourceGithubUserGpgKeyRead(d *schema.ResourceData, meta interface{}) erro } func resourceGithubUserGpgKeyDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client id, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { diff --git a/github/resource_github_user_gpg_key_test.go b/github/resource_github_user_gpg_key_test.go index 75ce8da49f..ee494d6a32 100644 --- a/github/resource_github_user_gpg_key_test.go +++ b/github/resource_github_user_gpg_key_test.go @@ -50,7 +50,7 @@ func testAccCheckGithubUserGpgKeyExists(n string, key *github.GPGKey) resource.T } org := testAccProvider.Meta().(*Organization) - receivedKey, _, err := org.client.Users.GetGPGKey(context.TODO(), id) + receivedKey, _, err := org.v3client.Users.GetGPGKey(context.TODO(), id) if err != nil { return err } @@ -60,7 +60,7 @@ func testAccCheckGithubUserGpgKeyExists(n string, key *github.GPGKey) resource.T } func testAccCheckGithubUserGpgKeyDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_user_gpg_key" { diff --git a/github/resource_github_user_invitation_accepter.go b/github/resource_github_user_invitation_accepter.go index d70c1eefc8..463edbeba1 100644 --- a/github/resource_github_user_invitation_accepter.go +++ b/github/resource_github_user_invitation_accepter.go @@ -26,7 +26,7 @@ func resourceGithubUserInvitationAccepter() *schema.Resource { } func resourceGithubUserInvitationAccepterCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client invitationIdString := d.Get("invitation_id").(string) invitationId, err := strconv.Atoi(invitationIdString) diff --git a/github/resource_github_user_ssh_key.go b/github/resource_github_user_ssh_key.go index f61a85e908..a2219e4c0b 100644 --- a/github/resource_github_user_ssh_key.go +++ b/github/resource_github_user_ssh_key.go @@ -48,7 +48,7 @@ func resourceGithubUserSshKey() *schema.Resource { } func resourceGithubUserSshKeyCreate(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client title := d.Get("title").(string) key := d.Get("key").(string) @@ -69,7 +69,7 @@ func resourceGithubUserSshKeyCreate(d *schema.ResourceData, meta interface{}) er } func resourceGithubUserSshKeyRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client id, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { @@ -105,7 +105,7 @@ func resourceGithubUserSshKeyRead(d *schema.ResourceData, meta interface{}) erro } func resourceGithubUserSshKeyDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client id, err := strconv.ParseInt(d.Id(), 10, 64) if err != nil { diff 
--git a/github/resource_github_user_ssh_key_test.go b/github/resource_github_user_ssh_key_test.go index 59f974e877..510311e1cf 100644 --- a/github/resource_github_user_ssh_key_test.go +++ b/github/resource_github_user_ssh_key_test.go @@ -58,7 +58,7 @@ func testAccCheckGithubUserSshKeyExists(n string, key *github.Key) resource.Test } org := testAccProvider.Meta().(*Organization) - receivedKey, _, err := org.client.Users.GetKey(context.TODO(), id) + receivedKey, _, err := org.v3client.Users.GetKey(context.TODO(), id) if err != nil { return err } @@ -68,7 +68,7 @@ func testAccCheckGithubUserSshKeyExists(n string, key *github.Key) resource.Test } func testAccCheckGithubUserSshKeyDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client for _, rs := range s.RootModule().Resources { if rs.Type != "github_user_ssh_key" { diff --git a/github/resource_organization_block.go b/github/resource_organization_block.go index 9195126d90..2104867c2f 100644 --- a/github/resource_organization_block.go +++ b/github/resource_organization_block.go @@ -39,7 +39,7 @@ func resourceOrganizationBlockCreate(d *schema.ResourceData, meta interface{}) e return err } - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name ctx := context.Background() username := d.Get("username").(string) @@ -55,7 +55,7 @@ func resourceOrganizationBlockCreate(d *schema.ResourceData, meta interface{}) e } func resourceOrganizationBlockRead(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name username := d.Id() @@ -95,7 +95,7 @@ func resourceOrganizationBlockRead(d *schema.ResourceData, meta interface{}) err } func resourceOrganizationBlockDelete(d *schema.ResourceData, meta interface{}) error { - client := meta.(*Organization).client + client := meta.(*Organization).v3client orgName := meta.(*Organization).name username := d.Id() diff --git a/github/resource_organization_block_test.go b/github/resource_organization_block_test.go index a8a078cabf..361ab1b0d2 100644 --- a/github/resource_organization_block_test.go +++ b/github/resource_organization_block_test.go @@ -33,7 +33,7 @@ func TestAccOrganizationBlock_basic(t *testing.T) { } func testAccOrganizationBlockDestroy(s *terraform.State) error { - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName := testAccProvider.Meta().(*Organization).name for _, rs := range s.RootModule().Resources { @@ -60,7 +60,7 @@ func testAccCheckOrganizationBlockExists(n string) resource.TestCheckFunc { } username := rs.Primary.ID - conn := testAccProvider.Meta().(*Organization).client + conn := testAccProvider.Meta().(*Organization).v3client orgName := testAccProvider.Meta().(*Organization).name blocked, _, err := conn.Organizations.IsBlocked(context.TODO(), orgName, username) diff --git a/github/sweeper_test.go b/github/sweeper_test.go index 973535a2a3..44fb7bab40 100644 --- a/github/sweeper_test.go +++ b/github/sweeper_test.go @@ -27,7 +27,7 @@ func sharedConfigForRegion(region string) (interface{}, error) { BaseURL: "", } - client, err := config.Client() + client, err := config.Clients() if err != nil { return nil, fmt.Errorf("error getting Github client") } From 6ae92c23e2d759fd13826c236e645dfc3a1b14fa Mon Sep 17 00:00:00 2001 From: Patrick Marabeas Date: Mon, 10 Feb 2020 
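
With every resource now reading the REST client from the renamed v3client field, individual resources can later be moved to the GraphQL client one at a time without touching their neighbours. A hypothetical read function using v4client might look like the sketch below (the resource, attribute names, and query shape are illustrative and not part of this series; only the v4client field and the shurcooL/githubv4 Query call are real):

func resourceExampleRead(d *schema.ResourceData, meta interface{}) error {
	client := meta.(*Organization).v4client
	orgName := meta.(*Organization).name

	// Query a single repository's ID and description via the v4 API.
	var query struct {
		Repository struct {
			ID          githubv4.ID
			Description githubv4.String
		} `graphql:"repository(owner: $owner, name: $name)"`
	}
	variables := map[string]interface{}{
		"owner": githubv4.String(orgName),
		"name":  githubv4.String(d.Get("repository").(string)),
	}

	ctx := context.WithValue(context.Background(), ctxId, d.Id())
	if err := client.Query(ctx, &query, variables); err != nil {
		return err
	}

	d.Set("description", string(query.Repository.Description))
	return nil
}
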
13:46:44 +1100 Subject: [PATCH 3/4] Update vendor files --- .../go-textseg/textseg/make_tables.go | 307 --- .../go-textseg/textseg/make_test_tables.go | 212 -- .../go-github/v29/github/gen-accessors.go | 333 ---- .../v29/github/gen-stringify-test.go | 358 ---- .../hashicorp/go-getter/.travis.yml | 24 - .../github.com/hashicorp/go-getter/README.md | 8 +- .../github.com/hashicorp/go-getter/client.go | 2 +- .../github.com/hashicorp/go-getter/get_git.go | 28 +- .../hashicorp/go-getter/get_http.go | 24 +- .../github.com/hashicorp/hcl/v2/CHANGELOG.md | 33 + vendor/github.com/hashicorp/hcl/v2/README.md | 11 +- .../github.com/hashicorp/hcl/v2/appveyor.yml | 13 + .../hcl/v2/ext/customdecode/README.md | 209 ++ .../hcl/v2/ext/customdecode/customdecode.go | 56 + .../v2/ext/customdecode/expression_type.go | 146 ++ .../hashicorp/hcl/v2/ext/typeexpr/README.md | 68 + .../hcl/v2/ext/typeexpr/type_type.go | 118 ++ vendor/github.com/hashicorp/hcl/v2/go.mod | 4 +- vendor/github.com/hashicorp/hcl/v2/go.sum | 8 +- .../hashicorp/hcl/v2/gohcl/decode.go | 26 +- .../hashicorp/hcl/v2/hcldec/spec.go | 36 +- .../hashicorp/hcl/v2/hclsyntax/expression.go | 56 +- .../hcl/v2/hclsyntax/expression_vars_gen.go | 99 - .../hashicorp/hcl/v2/hclsyntax/parser.go | 11 +- .../hashicorp/hcl/v2/hclwrite/ast_body.go | 22 +- .../hcl/v2/hclwrite/ast_expression.go | 23 + .../hashicorp/hcl/v2/hclwrite/generate.go | 8 +- .../terraform-config-inspect/tfconfig/load.go | 4 +- .../tfconfig/load_hcl.go | 39 +- .../tfconfig/load_legacy.go | 10 +- .../tfconfig/module.go | 6 +- .../tfconfig/provider_ref.go | 76 + vendor/github.com/hashicorp/terraform/LICENSE | 354 ++++ .../terraform/helper/logging/indent.go | 23 + .../terraform/helper/logging/level.go | 159 ++ .../terraform/helper/logging/logging.go | 109 + .../terraform/helper/logging/transport.go | 70 + .../github.com/shurcooL/githubv4/.travis.yml | 16 + vendor/github.com/shurcooL/githubv4/LICENSE | 21 + vendor/github.com/shurcooL/githubv4/README.md | 408 ++++ vendor/github.com/shurcooL/githubv4/doc.go | 13 + vendor/github.com/shurcooL/githubv4/enum.go | 1382 +++++++++++++ .../github.com/shurcooL/githubv4/githubv4.go | 56 + vendor/github.com/shurcooL/githubv4/input.go | 1751 +++++++++++++++++ vendor/github.com/shurcooL/githubv4/scalar.go | 139 ++ .../github.com/shurcooL/graphql/.travis.yml | 16 + vendor/github.com/shurcooL/graphql/LICENSE | 21 + vendor/github.com/shurcooL/graphql/README.md | 293 +++ vendor/github.com/shurcooL/graphql/doc.go | 11 + vendor/github.com/shurcooL/graphql/graphql.go | 123 ++ .../shurcooL/graphql/ident/ident.go | 240 +++ .../graphql/internal/jsonutil/graphql.go | 311 +++ vendor/github.com/shurcooL/graphql/query.go | 131 ++ vendor/github.com/shurcooL/graphql/scalar.go | 51 + vendor/github.com/ulikunitz/xz/example.go | 40 - vendor/golang.org/x/sys/unix/mkasm_darwin.go | 61 - vendor/golang.org/x/sys/unix/mkpost.go | 122 -- vendor/golang.org/x/sys/unix/mksyscall.go | 407 ---- .../x/sys/unix/mksyscall_aix_ppc.go | 415 ---- .../x/sys/unix/mksyscall_aix_ppc64.go | 614 ------ .../x/sys/unix/mksyscall_solaris.go | 335 ---- .../golang.org/x/sys/unix/mksysctl_openbsd.go | 355 ---- vendor/golang.org/x/sys/unix/mksysnum.go | 190 -- vendor/golang.org/x/sys/unix/types_aix.go | 237 --- vendor/golang.org/x/sys/unix/types_darwin.go | 283 --- .../golang.org/x/sys/unix/types_dragonfly.go | 263 --- vendor/golang.org/x/sys/unix/types_freebsd.go | 400 ---- vendor/golang.org/x/sys/unix/types_netbsd.go | 290 --- vendor/golang.org/x/sys/unix/types_openbsd.go | 283 --- 
vendor/golang.org/x/sys/unix/types_solaris.go | 266 --- vendor/golang.org/x/text/unicode/bidi/gen.go | 133 -- .../x/text/unicode/bidi/gen_ranges.go | 57 - .../x/text/unicode/bidi/gen_trieval.go | 64 - .../x/text/unicode/norm/maketables.go | 986 ---------- .../golang.org/x/text/unicode/norm/triegen.go | 117 -- vendor/modules.txt | 247 +-- 76 files changed, 6772 insertions(+), 7469 deletions(-) delete mode 100644 vendor/github.com/apparentlymart/go-textseg/textseg/make_tables.go delete mode 100644 vendor/github.com/apparentlymart/go-textseg/textseg/make_test_tables.go delete mode 100644 vendor/github.com/google/go-github/v29/github/gen-accessors.go delete mode 100644 vendor/github.com/google/go-github/v29/github/gen-stringify-test.go delete mode 100644 vendor/github.com/hashicorp/go-getter/.travis.yml create mode 100644 vendor/github.com/hashicorp/hcl/v2/appveyor.yml create mode 100644 vendor/github.com/hashicorp/hcl/v2/ext/customdecode/README.md create mode 100644 vendor/github.com/hashicorp/hcl/v2/ext/customdecode/customdecode.go create mode 100644 vendor/github.com/hashicorp/hcl/v2/ext/customdecode/expression_type.go create mode 100644 vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/type_type.go delete mode 100644 vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression_vars_gen.go create mode 100644 vendor/github.com/hashicorp/terraform/LICENSE create mode 100644 vendor/github.com/hashicorp/terraform/helper/logging/indent.go create mode 100644 vendor/github.com/hashicorp/terraform/helper/logging/level.go create mode 100644 vendor/github.com/hashicorp/terraform/helper/logging/logging.go create mode 100644 vendor/github.com/hashicorp/terraform/helper/logging/transport.go create mode 100644 vendor/github.com/shurcooL/githubv4/.travis.yml create mode 100644 vendor/github.com/shurcooL/githubv4/LICENSE create mode 100644 vendor/github.com/shurcooL/githubv4/README.md create mode 100644 vendor/github.com/shurcooL/githubv4/doc.go create mode 100644 vendor/github.com/shurcooL/githubv4/enum.go create mode 100644 vendor/github.com/shurcooL/githubv4/githubv4.go create mode 100644 vendor/github.com/shurcooL/githubv4/input.go create mode 100644 vendor/github.com/shurcooL/githubv4/scalar.go create mode 100644 vendor/github.com/shurcooL/graphql/.travis.yml create mode 100644 vendor/github.com/shurcooL/graphql/LICENSE create mode 100644 vendor/github.com/shurcooL/graphql/README.md create mode 100644 vendor/github.com/shurcooL/graphql/doc.go create mode 100644 vendor/github.com/shurcooL/graphql/graphql.go create mode 100644 vendor/github.com/shurcooL/graphql/ident/ident.go create mode 100644 vendor/github.com/shurcooL/graphql/internal/jsonutil/graphql.go create mode 100644 vendor/github.com/shurcooL/graphql/query.go create mode 100644 vendor/github.com/shurcooL/graphql/scalar.go delete mode 100644 vendor/github.com/ulikunitz/xz/example.go delete mode 100644 vendor/golang.org/x/sys/unix/mkasm_darwin.go delete mode 100644 vendor/golang.org/x/sys/unix/mkpost.go delete mode 100644 vendor/golang.org/x/sys/unix/mksyscall.go delete mode 100644 vendor/golang.org/x/sys/unix/mksyscall_aix_ppc.go delete mode 100644 vendor/golang.org/x/sys/unix/mksyscall_aix_ppc64.go delete mode 100644 vendor/golang.org/x/sys/unix/mksyscall_solaris.go delete mode 100644 vendor/golang.org/x/sys/unix/mksysctl_openbsd.go delete mode 100644 vendor/golang.org/x/sys/unix/mksysnum.go delete mode 100644 vendor/golang.org/x/sys/unix/types_aix.go delete mode 100644 vendor/golang.org/x/sys/unix/types_darwin.go delete mode 100644 
vendor/golang.org/x/sys/unix/types_dragonfly.go delete mode 100644 vendor/golang.org/x/sys/unix/types_freebsd.go delete mode 100644 vendor/golang.org/x/sys/unix/types_netbsd.go delete mode 100644 vendor/golang.org/x/sys/unix/types_openbsd.go delete mode 100644 vendor/golang.org/x/sys/unix/types_solaris.go delete mode 100644 vendor/golang.org/x/text/unicode/bidi/gen.go delete mode 100644 vendor/golang.org/x/text/unicode/bidi/gen_ranges.go delete mode 100644 vendor/golang.org/x/text/unicode/bidi/gen_trieval.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/maketables.go delete mode 100644 vendor/golang.org/x/text/unicode/norm/triegen.go diff --git a/vendor/github.com/apparentlymart/go-textseg/textseg/make_tables.go b/vendor/github.com/apparentlymart/go-textseg/textseg/make_tables.go deleted file mode 100644 index aad3d0506a..0000000000 --- a/vendor/github.com/apparentlymart/go-textseg/textseg/make_tables.go +++ /dev/null @@ -1,307 +0,0 @@ -// Copyright (c) 2014 Couchbase, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file -// except in compliance with the License. You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software distributed under the -// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, -// either express or implied. See the License for the specific language governing permissions -// and limitations under the License. - -// Modified by Martin Atkins to serve the needs of package textseg. - -// +build ignore - -package main - -import ( - "bufio" - "flag" - "fmt" - "io" - "log" - "net/http" - "os" - "os/exec" - "sort" - "strconv" - "strings" - "unicode" -) - -var url = flag.String("url", - "http://www.unicode.org/Public/"+unicode.Version+"/ucd/auxiliary/", - "URL of Unicode database directory") -var verbose = flag.Bool("verbose", - false, - "write data to stdout as it is parsed") -var localFiles = flag.Bool("local", - false, - "data files have been copied to the current directory; for debugging only") -var outputFile = flag.String("output", - "", - "output file for generated tables; default stdout") - -var output *bufio.Writer - -func main() { - flag.Parse() - setupOutput() - - graphemePropertyRanges := make(map[string]*unicode.RangeTable) - loadUnicodeData("GraphemeBreakProperty.txt", graphemePropertyRanges) - wordPropertyRanges := make(map[string]*unicode.RangeTable) - loadUnicodeData("WordBreakProperty.txt", wordPropertyRanges) - sentencePropertyRanges := make(map[string]*unicode.RangeTable) - loadUnicodeData("SentenceBreakProperty.txt", sentencePropertyRanges) - - fmt.Fprintf(output, fileHeader, *url) - generateTables("Grapheme", graphemePropertyRanges) - generateTables("Word", wordPropertyRanges) - generateTables("Sentence", sentencePropertyRanges) - - flushOutput() -} - -// WordBreakProperty.txt has the form: -// 05F0..05F2 ; Hebrew_Letter # Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV..HEBREW LIGATURE YIDDISH DOUBLE YOD -// FB1D ; Hebrew_Letter # Lo HEBREW LETTER YOD WITH HIRIQ -func openReader(file string) (input io.ReadCloser) { - if *localFiles { - f, err := os.Open(file) - if err != nil { - log.Fatal(err) - } - input = f - } else { - path := *url + file - resp, err := http.Get(path) - if err != nil { - log.Fatal(err) - } - if resp.StatusCode != 200 { - log.Fatal("bad GET status for "+file, resp.Status) - } - input = resp.Body - } - return -} - -func loadUnicodeData(filename string, 
propertyRanges map[string]*unicode.RangeTable) { - f := openReader(filename) - defer f.Close() - bufioReader := bufio.NewReader(f) - line, err := bufioReader.ReadString('\n') - for err == nil { - parseLine(line, propertyRanges) - line, err = bufioReader.ReadString('\n') - } - // if the err was EOF still need to process last value - if err == io.EOF { - parseLine(line, propertyRanges) - } -} - -const comment = "#" -const sep = ";" -const rnge = ".." - -func parseLine(line string, propertyRanges map[string]*unicode.RangeTable) { - if strings.HasPrefix(line, comment) { - return - } - line = strings.TrimSpace(line) - if len(line) == 0 { - return - } - commentStart := strings.Index(line, comment) - if commentStart > 0 { - line = line[0:commentStart] - } - pieces := strings.Split(line, sep) - if len(pieces) != 2 { - log.Printf("unexpected %d pieces in %s", len(pieces), line) - return - } - - propertyName := strings.TrimSpace(pieces[1]) - - rangeTable, ok := propertyRanges[propertyName] - if !ok { - rangeTable = &unicode.RangeTable{ - LatinOffset: 0, - } - propertyRanges[propertyName] = rangeTable - } - - codepointRange := strings.TrimSpace(pieces[0]) - rngeIndex := strings.Index(codepointRange, rnge) - - if rngeIndex < 0 { - // single codepoint, not range - codepointInt, err := strconv.ParseUint(codepointRange, 16, 64) - if err != nil { - log.Printf("error parsing int: %v", err) - return - } - if codepointInt < 0x10000 { - r16 := unicode.Range16{ - Lo: uint16(codepointInt), - Hi: uint16(codepointInt), - Stride: 1, - } - addR16ToTable(rangeTable, r16) - } else { - r32 := unicode.Range32{ - Lo: uint32(codepointInt), - Hi: uint32(codepointInt), - Stride: 1, - } - addR32ToTable(rangeTable, r32) - } - } else { - rngeStart := codepointRange[0:rngeIndex] - rngeEnd := codepointRange[rngeIndex+2:] - rngeStartInt, err := strconv.ParseUint(rngeStart, 16, 64) - if err != nil { - log.Printf("error parsing int: %v", err) - return - } - rngeEndInt, err := strconv.ParseUint(rngeEnd, 16, 64) - if err != nil { - log.Printf("error parsing int: %v", err) - return - } - if rngeStartInt < 0x10000 && rngeEndInt < 0x10000 { - r16 := unicode.Range16{ - Lo: uint16(rngeStartInt), - Hi: uint16(rngeEndInt), - Stride: 1, - } - addR16ToTable(rangeTable, r16) - } else if rngeStartInt >= 0x10000 && rngeEndInt >= 0x10000 { - r32 := unicode.Range32{ - Lo: uint32(rngeStartInt), - Hi: uint32(rngeEndInt), - Stride: 1, - } - addR32ToTable(rangeTable, r32) - } else { - log.Printf("unexpected range") - } - } -} - -func addR16ToTable(r *unicode.RangeTable, r16 unicode.Range16) { - if r.R16 == nil { - r.R16 = make([]unicode.Range16, 0, 1) - } - r.R16 = append(r.R16, r16) - if r16.Hi <= unicode.MaxLatin1 { - r.LatinOffset++ - } -} - -func addR32ToTable(r *unicode.RangeTable, r32 unicode.Range32) { - if r.R32 == nil { - r.R32 = make([]unicode.Range32, 0, 1) - } - r.R32 = append(r.R32, r32) -} - -func generateTables(prefix string, propertyRanges map[string]*unicode.RangeTable) { - prNames := make([]string, 0, len(propertyRanges)) - for k := range propertyRanges { - prNames = append(prNames, k) - } - sort.Strings(prNames) - for _, key := range prNames { - rt := propertyRanges[key] - fmt.Fprintf(output, "var _%s%s = %s\n", prefix, key, generateRangeTable(rt)) - } - fmt.Fprintf(output, "type _%sRuneRange unicode.RangeTable\n", prefix) - - fmt.Fprintf(output, "func _%sRuneType(r rune) *_%sRuneRange {\n", prefix, prefix) - fmt.Fprintf(output, "\tswitch {\n") - for _, key := range prNames { - fmt.Fprintf(output, "\tcase unicode.Is(_%s%s, 
r):\n\t\treturn (*_%sRuneRange)(_%s%s)\n", prefix, key, prefix, prefix, key) - } - fmt.Fprintf(output, "\tdefault:\n\t\treturn nil\n") - fmt.Fprintf(output, "\t}\n") - fmt.Fprintf(output, "}\n") - - fmt.Fprintf(output, "func (rng *_%sRuneRange) String() string {\n", prefix) - fmt.Fprintf(output, "\tswitch (*unicode.RangeTable)(rng) {\n") - for _, key := range prNames { - fmt.Fprintf(output, "\tcase _%s%s:\n\t\treturn %q\n", prefix, key, key) - } - fmt.Fprintf(output, "\tdefault:\n\t\treturn \"Other\"\n") - fmt.Fprintf(output, "\t}\n") - fmt.Fprintf(output, "}\n") -} - -func generateRangeTable(rt *unicode.RangeTable) string { - rv := "&unicode.RangeTable{\n" - if rt.R16 != nil { - rv += "\tR16: []unicode.Range16{\n" - for _, r16 := range rt.R16 { - rv += fmt.Sprintf("\t\t%#v,\n", r16) - } - rv += "\t},\n" - } - if rt.R32 != nil { - rv += "\tR32: []unicode.Range32{\n" - for _, r32 := range rt.R32 { - rv += fmt.Sprintf("\t\t%#v,\n", r32) - } - rv += "\t},\n" - } - rv += fmt.Sprintf("\t\tLatinOffset: %d,\n", rt.LatinOffset) - rv += "}\n" - return rv -} - -const fileHeader = `// Generated by running -// maketables --url=%s -// DO NOT EDIT - -package textseg - -import( - "unicode" -) -` - -func setupOutput() { - output = bufio.NewWriter(startGofmt()) -} - -// startGofmt connects output to a gofmt process if -output is set. -func startGofmt() io.Writer { - if *outputFile == "" { - return os.Stdout - } - stdout, err := os.Create(*outputFile) - if err != nil { - log.Fatal(err) - } - // Pipe output to gofmt. - gofmt := exec.Command("gofmt") - fd, err := gofmt.StdinPipe() - if err != nil { - log.Fatal(err) - } - gofmt.Stdout = stdout - gofmt.Stderr = os.Stderr - err = gofmt.Start() - if err != nil { - log.Fatal(err) - } - return fd -} - -func flushOutput() { - err := output.Flush() - if err != nil { - log.Fatal(err) - } -} diff --git a/vendor/github.com/apparentlymart/go-textseg/textseg/make_test_tables.go b/vendor/github.com/apparentlymart/go-textseg/textseg/make_test_tables.go deleted file mode 100644 index ac4200260b..0000000000 --- a/vendor/github.com/apparentlymart/go-textseg/textseg/make_test_tables.go +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright (c) 2014 Couchbase, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file -// except in compliance with the License. You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software distributed under the -// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, -// either express or implied. See the License for the specific language governing permissions -// and limitations under the License. 
- -// +build ignore - -package main - -import ( - "bufio" - "bytes" - "flag" - "fmt" - "io" - "log" - "net/http" - "os" - "os/exec" - "strconv" - "strings" - "unicode" -) - -var url = flag.String("url", - "http://www.unicode.org/Public/"+unicode.Version+"/ucd/auxiliary/", - "URL of Unicode database directory") -var verbose = flag.Bool("verbose", - false, - "write data to stdout as it is parsed") -var localFiles = flag.Bool("local", - false, - "data files have been copied to the current directory; for debugging only") - -var outputFile = flag.String("output", - "", - "output file for generated tables; default stdout") - -var output *bufio.Writer - -func main() { - flag.Parse() - setupOutput() - - graphemeTests := make([]test, 0) - graphemeTests = loadUnicodeData("GraphemeBreakTest.txt", graphemeTests) - wordTests := make([]test, 0) - wordTests = loadUnicodeData("WordBreakTest.txt", wordTests) - sentenceTests := make([]test, 0) - sentenceTests = loadUnicodeData("SentenceBreakTest.txt", sentenceTests) - - fmt.Fprintf(output, fileHeader, *url) - generateTestTables("Grapheme", graphemeTests) - generateTestTables("Word", wordTests) - generateTestTables("Sentence", sentenceTests) - - flushOutput() -} - -// WordBreakProperty.txt has the form: -// 05F0..05F2 ; Hebrew_Letter # Lo [3] HEBREW LIGATURE YIDDISH DOUBLE VAV..HEBREW LIGATURE YIDDISH DOUBLE YOD -// FB1D ; Hebrew_Letter # Lo HEBREW LETTER YOD WITH HIRIQ -func openReader(file string) (input io.ReadCloser) { - if *localFiles { - f, err := os.Open(file) - if err != nil { - log.Fatal(err) - } - input = f - } else { - path := *url + file - resp, err := http.Get(path) - if err != nil { - log.Fatal(err) - } - if resp.StatusCode != 200 { - log.Fatal("bad GET status for "+file, resp.Status) - } - input = resp.Body - } - return -} - -func loadUnicodeData(filename string, tests []test) []test { - f := openReader(filename) - defer f.Close() - bufioReader := bufio.NewReader(f) - line, err := bufioReader.ReadString('\n') - for err == nil { - tests = parseLine(line, tests) - line, err = bufioReader.ReadString('\n') - } - // if the err was EOF still need to process last value - if err == io.EOF { - tests = parseLine(line, tests) - } - return tests -} - -const comment = "#" -const brk = "÷" -const nbrk = "×" - -type test [][]byte - -func parseLine(line string, tests []test) []test { - if strings.HasPrefix(line, comment) { - return tests - } - line = strings.TrimSpace(line) - if len(line) == 0 { - return tests - } - commentStart := strings.Index(line, comment) - if commentStart > 0 { - line = line[0:commentStart] - } - pieces := strings.Split(line, brk) - t := make(test, 0) - for _, piece := range pieces { - piece = strings.TrimSpace(piece) - if len(piece) > 0 { - codePoints := strings.Split(piece, nbrk) - word := "" - for _, codePoint := range codePoints { - codePoint = strings.TrimSpace(codePoint) - r, err := strconv.ParseInt(codePoint, 16, 64) - if err != nil { - log.Printf("err: %v for '%s'", err, string(r)) - return tests - } - - word += string(r) - } - t = append(t, []byte(word)) - } - } - tests = append(tests, t) - return tests -} - -func generateTestTables(prefix string, tests []test) { - fmt.Fprintf(output, testHeader, prefix) - for _, t := range tests { - fmt.Fprintf(output, "\t\t{\n") - fmt.Fprintf(output, "\t\t\tinput: %#v,\n", bytes.Join(t, []byte{})) - fmt.Fprintf(output, "\t\t\toutput: %s,\n", generateTest(t)) - fmt.Fprintf(output, "\t\t},\n") - } - fmt.Fprintf(output, "}\n") -} - -func generateTest(t test) string { - rv := "[][]byte{" - for _, 
te := range t { - rv += fmt.Sprintf("%#v,", te) - } - rv += "}" - return rv -} - -const fileHeader = `// Generated by running -// maketesttables --url=%s -// DO NOT EDIT - -package textseg -` - -const testHeader = `var unicode%sTests = []struct { - input []byte - output [][]byte - }{ -` - -func setupOutput() { - output = bufio.NewWriter(startGofmt()) -} - -// startGofmt connects output to a gofmt process if -output is set. -func startGofmt() io.Writer { - if *outputFile == "" { - return os.Stdout - } - stdout, err := os.Create(*outputFile) - if err != nil { - log.Fatal(err) - } - // Pipe output to gofmt. - gofmt := exec.Command("gofmt") - fd, err := gofmt.StdinPipe() - if err != nil { - log.Fatal(err) - } - gofmt.Stdout = stdout - gofmt.Stderr = os.Stderr - err = gofmt.Start() - if err != nil { - log.Fatal(err) - } - return fd -} - -func flushOutput() { - err := output.Flush() - if err != nil { - log.Fatal(err) - } -} diff --git a/vendor/github.com/google/go-github/v29/github/gen-accessors.go b/vendor/github.com/google/go-github/v29/github/gen-accessors.go deleted file mode 100644 index 4c5e8eec7e..0000000000 --- a/vendor/github.com/google/go-github/v29/github/gen-accessors.go +++ /dev/null @@ -1,333 +0,0 @@ -// Copyright 2017 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -// gen-accessors generates accessor methods for structs with pointer fields. -// -// It is meant to be used by go-github contributors in conjunction with the -// go generate tool before sending a PR to GitHub. -// Please see the CONTRIBUTING.md file for more information. -package main - -import ( - "bytes" - "flag" - "fmt" - "go/ast" - "go/format" - "go/parser" - "go/token" - "io/ioutil" - "log" - "os" - "sort" - "strings" - "text/template" -) - -const ( - fileSuffix = "-accessors.go" -) - -var ( - verbose = flag.Bool("v", false, "Print verbose log messages") - - sourceTmpl = template.Must(template.New("source").Parse(source)) - - // blacklistStructMethod lists "struct.method" combos to skip. - blacklistStructMethod = map[string]bool{ - "RepositoryContent.GetContent": true, - "Client.GetBaseURL": true, - "Client.GetUploadURL": true, - "ErrorResponse.GetResponse": true, - "RateLimitError.GetResponse": true, - "AbuseRateLimitError.GetResponse": true, - } - // blacklistStruct lists structs to skip. - blacklistStruct = map[string]bool{ - "Client": true, - } -) - -func logf(fmt string, args ...interface{}) { - if *verbose { - log.Printf(fmt, args...) - } -} - -func main() { - flag.Parse() - fset := token.NewFileSet() - - pkgs, err := parser.ParseDir(fset, ".", sourceFilter, 0) - if err != nil { - log.Fatal(err) - return - } - - for pkgName, pkg := range pkgs { - t := &templateData{ - filename: pkgName + fileSuffix, - Year: 2017, - Package: pkgName, - Imports: map[string]string{}, - } - for filename, f := range pkg.Files { - logf("Processing %v...", filename) - if err := t.processAST(f); err != nil { - log.Fatal(err) - } - } - if err := t.dump(); err != nil { - log.Fatal(err) - } - } - logf("Done.") -} - -func (t *templateData) processAST(f *ast.File) error { - for _, decl := range f.Decls { - gd, ok := decl.(*ast.GenDecl) - if !ok { - continue - } - for _, spec := range gd.Specs { - ts, ok := spec.(*ast.TypeSpec) - if !ok { - continue - } - // Skip unexported identifiers. 
- if !ts.Name.IsExported() { - logf("Struct %v is unexported; skipping.", ts.Name) - continue - } - // Check if the struct is blacklisted. - if blacklistStruct[ts.Name.Name] { - logf("Struct %v is blacklisted; skipping.", ts.Name) - continue - } - st, ok := ts.Type.(*ast.StructType) - if !ok { - continue - } - for _, field := range st.Fields.List { - se, ok := field.Type.(*ast.StarExpr) - if len(field.Names) == 0 || !ok { - continue - } - - fieldName := field.Names[0] - // Skip unexported identifiers. - if !fieldName.IsExported() { - logf("Field %v is unexported; skipping.", fieldName) - continue - } - // Check if "struct.method" is blacklisted. - if key := fmt.Sprintf("%v.Get%v", ts.Name, fieldName); blacklistStructMethod[key] { - logf("Method %v is blacklisted; skipping.", key) - continue - } - - switch x := se.X.(type) { - case *ast.ArrayType: - t.addArrayType(x, ts.Name.String(), fieldName.String()) - case *ast.Ident: - t.addIdent(x, ts.Name.String(), fieldName.String()) - case *ast.MapType: - t.addMapType(x, ts.Name.String(), fieldName.String()) - case *ast.SelectorExpr: - t.addSelectorExpr(x, ts.Name.String(), fieldName.String()) - default: - logf("processAST: type %q, field %q, unknown %T: %+v", ts.Name, fieldName, x, x) - } - } - } - } - return nil -} - -func sourceFilter(fi os.FileInfo) bool { - return !strings.HasSuffix(fi.Name(), "_test.go") && !strings.HasSuffix(fi.Name(), fileSuffix) -} - -func (t *templateData) dump() error { - if len(t.Getters) == 0 { - logf("No getters for %v; skipping.", t.filename) - return nil - } - - // Sort getters by ReceiverType.FieldName. - sort.Sort(byName(t.Getters)) - - var buf bytes.Buffer - if err := sourceTmpl.Execute(&buf, t); err != nil { - return err - } - clean, err := format.Source(buf.Bytes()) - if err != nil { - return err - } - - logf("Writing %v...", t.filename) - return ioutil.WriteFile(t.filename, clean, 0644) -} - -func newGetter(receiverType, fieldName, fieldType, zeroValue string, namedStruct bool) *getter { - return &getter{ - sortVal: strings.ToLower(receiverType) + "." 
+ strings.ToLower(fieldName), - ReceiverVar: strings.ToLower(receiverType[:1]), - ReceiverType: receiverType, - FieldName: fieldName, - FieldType: fieldType, - ZeroValue: zeroValue, - NamedStruct: namedStruct, - } -} - -func (t *templateData) addArrayType(x *ast.ArrayType, receiverType, fieldName string) { - var eltType string - switch elt := x.Elt.(type) { - case *ast.Ident: - eltType = elt.String() - default: - logf("addArrayType: type %q, field %q: unknown elt type: %T %+v; skipping.", receiverType, fieldName, elt, elt) - return - } - - t.Getters = append(t.Getters, newGetter(receiverType, fieldName, "[]"+eltType, "nil", false)) -} - -func (t *templateData) addIdent(x *ast.Ident, receiverType, fieldName string) { - var zeroValue string - var namedStruct = false - switch x.String() { - case "int", "int64": - zeroValue = "0" - case "string": - zeroValue = `""` - case "bool": - zeroValue = "false" - case "Timestamp": - zeroValue = "Timestamp{}" - default: - zeroValue = "nil" - namedStruct = true - } - - t.Getters = append(t.Getters, newGetter(receiverType, fieldName, x.String(), zeroValue, namedStruct)) -} - -func (t *templateData) addMapType(x *ast.MapType, receiverType, fieldName string) { - var keyType string - switch key := x.Key.(type) { - case *ast.Ident: - keyType = key.String() - default: - logf("addMapType: type %q, field %q: unknown key type: %T %+v; skipping.", receiverType, fieldName, key, key) - return - } - - var valueType string - switch value := x.Value.(type) { - case *ast.Ident: - valueType = value.String() - default: - logf("addMapType: type %q, field %q: unknown value type: %T %+v; skipping.", receiverType, fieldName, value, value) - return - } - - fieldType := fmt.Sprintf("map[%v]%v", keyType, valueType) - zeroValue := fmt.Sprintf("map[%v]%v{}", keyType, valueType) - t.Getters = append(t.Getters, newGetter(receiverType, fieldName, fieldType, zeroValue, false)) -} - -func (t *templateData) addSelectorExpr(x *ast.SelectorExpr, receiverType, fieldName string) { - if strings.ToLower(fieldName[:1]) == fieldName[:1] { // Non-exported field. - return - } - - var xX string - if xx, ok := x.X.(*ast.Ident); ok { - xX = xx.String() - } - - switch xX { - case "time", "json": - if xX == "json" { - t.Imports["encoding/json"] = "encoding/json" - } else { - t.Imports[xX] = xX - } - fieldType := fmt.Sprintf("%v.%v", xX, x.Sel.Name) - zeroValue := fmt.Sprintf("%v.%v{}", xX, x.Sel.Name) - if xX == "time" && x.Sel.Name == "Duration" { - zeroValue = "0" - } - t.Getters = append(t.Getters, newGetter(receiverType, fieldName, fieldType, zeroValue, false)) - default: - logf("addSelectorExpr: xX %q, type %q, field %q: unknown x=%+v; skipping.", xX, receiverType, fieldName, x) - } -} - -type templateData struct { - filename string - Year int - Package string - Imports map[string]string - Getters []*getter -} - -type getter struct { - sortVal string // Lower-case version of "ReceiverType.FieldName". - ReceiverVar string // The one-letter variable name to match the ReceiverType. - ReceiverType string - FieldName string - FieldType string - ZeroValue string - NamedStruct bool // Getter for named struct. -} - -type byName []*getter - -func (b byName) Len() int { return len(b) } -func (b byName) Less(i, j int) bool { return b[i].sortVal < b[j].sortVal } -func (b byName) Swap(i, j int) { b[i], b[j] = b[j], b[i] } - -const source = `// Copyright {{.Year}} The go-github AUTHORS. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by gen-accessors; DO NOT EDIT. - -package {{.Package}} -{{with .Imports}} -import ( - {{- range . -}} - "{{.}}" - {{end -}} -) -{{end}} -{{range .Getters}} -{{if .NamedStruct}} -// Get{{.FieldName}} returns the {{.FieldName}} field. -func ({{.ReceiverVar}} *{{.ReceiverType}}) Get{{.FieldName}}() *{{.FieldType}} { - if {{.ReceiverVar}} == nil { - return {{.ZeroValue}} - } - return {{.ReceiverVar}}.{{.FieldName}} -} -{{else}} -// Get{{.FieldName}} returns the {{.FieldName}} field if it's non-nil, zero value otherwise. -func ({{.ReceiverVar}} *{{.ReceiverType}}) Get{{.FieldName}}() {{.FieldType}} { - if {{.ReceiverVar}} == nil || {{.ReceiverVar}}.{{.FieldName}} == nil { - return {{.ZeroValue}} - } - return *{{.ReceiverVar}}.{{.FieldName}} -} -{{end}} -{{end}} -` diff --git a/vendor/github.com/google/go-github/v29/github/gen-stringify-test.go b/vendor/github.com/google/go-github/v29/github/gen-stringify-test.go deleted file mode 100644 index 7803801e63..0000000000 --- a/vendor/github.com/google/go-github/v29/github/gen-stringify-test.go +++ /dev/null @@ -1,358 +0,0 @@ -// Copyright 2019 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -// gen-stringify-test generates test methods to test the String methods. -// -// These tests eliminate most of the code coverage problems so that real -// code coverage issues can be more readily identified. -// -// It is meant to be used by go-github contributors in conjunction with the -// go generate tool before sending a PR to GitHub. -// Please see the CONTRIBUTING.md file for more information. -package main - -import ( - "bytes" - "flag" - "fmt" - "go/ast" - "go/format" - "go/parser" - "go/token" - "io/ioutil" - "log" - "os" - "strings" - "text/template" -) - -const ( - ignoreFilePrefix1 = "gen-" - ignoreFilePrefix2 = "github-" - outputFileSuffix = "-stringify_test.go" -) - -var ( - verbose = flag.Bool("v", false, "Print verbose log messages") - - // blacklistStructMethod lists "struct.method" combos to skip. - blacklistStructMethod = map[string]bool{} - // blacklistStruct lists structs to skip. - blacklistStruct = map[string]bool{ - "RateLimits": true, - } - - funcMap = template.FuncMap{ - "isNotLast": func(index int, slice []*structField) string { - if index+1 < len(slice) { - return ", " - } - return "" - }, - "processZeroValue": func(v string) string { - switch v { - case "Bool(false)": - return "false" - case "Float64(0.0)": - return "0" - case "0", "Int(0)", "Int64(0)": - return "0" - case `""`, `String("")`: - return `""` - case "Timestamp{}", "&Timestamp{}": - return "github.Timestamp{0001-01-01 00:00:00 +0000 UTC}" - case "nil": - return "map[]" - } - log.Fatalf("Unhandled zero value: %q", v) - return "" - }, - } - - sourceTmpl = template.Must(template.New("source").Funcs(funcMap).Parse(source)) -) - -func main() { - flag.Parse() - fset := token.NewFileSet() - - pkgs, err := parser.ParseDir(fset, ".", sourceFilter, 0) - if err != nil { - log.Fatal(err) - return - } - - for pkgName, pkg := range pkgs { - t := &templateData{ - filename: pkgName + outputFileSuffix, - Year: 2019, // No need to change this once set (even in following years). 
- Package: pkgName, - Imports: map[string]string{"testing": "testing"}, - StringFuncs: map[string]bool{}, - StructFields: map[string][]*structField{}, - } - for filename, f := range pkg.Files { - logf("Processing %v...", filename) - if err := t.processAST(f); err != nil { - log.Fatal(err) - } - } - if err := t.dump(); err != nil { - log.Fatal(err) - } - } - logf("Done.") -} - -func sourceFilter(fi os.FileInfo) bool { - return !strings.HasSuffix(fi.Name(), "_test.go") && - !strings.HasPrefix(fi.Name(), ignoreFilePrefix1) && - !strings.HasPrefix(fi.Name(), ignoreFilePrefix2) -} - -type templateData struct { - filename string - Year int - Package string - Imports map[string]string - StringFuncs map[string]bool - StructFields map[string][]*structField -} - -type structField struct { - sortVal string // Lower-case version of "ReceiverType.FieldName". - ReceiverVar string // The one-letter variable name to match the ReceiverType. - ReceiverType string - FieldName string - FieldType string - ZeroValue string - NamedStruct bool // Getter for named struct. -} - -func (t *templateData) processAST(f *ast.File) error { - for _, decl := range f.Decls { - fn, ok := decl.(*ast.FuncDecl) - if ok { - if fn.Recv != nil && len(fn.Recv.List) > 0 { - id, ok := fn.Recv.List[0].Type.(*ast.Ident) - if ok && fn.Name.Name == "String" { - logf("Got FuncDecl: Name=%q, id.Name=%#v", fn.Name.Name, id.Name) - t.StringFuncs[id.Name] = true - } else { - logf("Ignoring FuncDecl: Name=%q, Type=%T", fn.Name.Name, fn.Recv.List[0].Type) - } - } else { - logf("Ignoring FuncDecl: Name=%q, fn=%#v", fn.Name.Name, fn) - } - continue - } - - gd, ok := decl.(*ast.GenDecl) - if !ok { - logf("Ignoring AST decl type %T", decl) - continue - } - for _, spec := range gd.Specs { - ts, ok := spec.(*ast.TypeSpec) - if !ok { - continue - } - // Skip unexported identifiers. - if !ts.Name.IsExported() { - logf("Struct %v is unexported; skipping.", ts.Name) - continue - } - // Check if the struct is blacklisted. - if blacklistStruct[ts.Name.Name] { - logf("Struct %v is blacklisted; skipping.", ts.Name) - continue - } - st, ok := ts.Type.(*ast.StructType) - if !ok { - logf("Ignoring AST type %T, Name=%q", ts.Type, ts.Name.String()) - continue - } - for _, field := range st.Fields.List { - if len(field.Names) == 0 { - continue - } - - fieldName := field.Names[0] - if id, ok := field.Type.(*ast.Ident); ok { - t.addIdent(id, ts.Name.String(), fieldName.String()) - continue - } - - if _, ok := field.Type.(*ast.MapType); ok { - t.addMapType(ts.Name.String(), fieldName.String()) - continue - } - - se, ok := field.Type.(*ast.StarExpr) - if !ok { - logf("Ignoring type %T for Name=%q, FieldName=%q", field.Type, ts.Name.String(), fieldName.String()) - continue - } - - // Skip unexported identifiers. - if !fieldName.IsExported() { - logf("Field %v is unexported; skipping.", fieldName) - continue - } - // Check if "struct.method" is blacklisted. 
- if key := fmt.Sprintf("%v.Get%v", ts.Name, fieldName); blacklistStructMethod[key] { - logf("Method %v is blacklisted; skipping.", key) - continue - } - - switch x := se.X.(type) { - case *ast.ArrayType: - case *ast.Ident: - t.addIdentPtr(x, ts.Name.String(), fieldName.String()) - case *ast.MapType: - case *ast.SelectorExpr: - default: - logf("processAST: type %q, field %q, unknown %T: %+v", ts.Name, fieldName, x, x) - } - } - } - } - return nil -} - -func (t *templateData) addMapType(receiverType, fieldName string) { - t.StructFields[receiverType] = append(t.StructFields[receiverType], newStructField(receiverType, fieldName, "map[]", "nil", false)) -} - -func (t *templateData) addIdent(x *ast.Ident, receiverType, fieldName string) { - var zeroValue string - var namedStruct = false - switch x.String() { - case "int": - zeroValue = "0" - case "int64": - zeroValue = "0" - case "float64": - zeroValue = "0.0" - case "string": - zeroValue = `""` - case "bool": - zeroValue = "false" - case "Timestamp": - zeroValue = "Timestamp{}" - default: - zeroValue = "nil" - namedStruct = true - } - - t.StructFields[receiverType] = append(t.StructFields[receiverType], newStructField(receiverType, fieldName, x.String(), zeroValue, namedStruct)) -} - -func (t *templateData) addIdentPtr(x *ast.Ident, receiverType, fieldName string) { - var zeroValue string - var namedStruct = false - switch x.String() { - case "int": - zeroValue = "Int(0)" - case "int64": - zeroValue = "Int64(0)" - case "float64": - zeroValue = "Float64(0.0)" - case "string": - zeroValue = `String("")` - case "bool": - zeroValue = "Bool(false)" - case "Timestamp": - zeroValue = "&Timestamp{}" - default: - zeroValue = "nil" - namedStruct = true - } - - t.StructFields[receiverType] = append(t.StructFields[receiverType], newStructField(receiverType, fieldName, x.String(), zeroValue, namedStruct)) -} - -func (t *templateData) dump() error { - if len(t.StructFields) == 0 { - logf("No StructFields for %v; skipping.", t.filename) - return nil - } - - // Remove unused structs. - var toDelete []string - for k := range t.StructFields { - if !t.StringFuncs[k] { - toDelete = append(toDelete, k) - continue - } - } - for _, k := range toDelete { - delete(t.StructFields, k) - } - - var buf bytes.Buffer - if err := sourceTmpl.Execute(&buf, t); err != nil { - return err - } - clean, err := format.Source(buf.Bytes()) - if err != nil { - log.Printf("failed-to-format source:\n%v", buf.String()) - return err - } - - logf("Writing %v...", t.filename) - return ioutil.WriteFile(t.filename, clean, 0644) -} - -func newStructField(receiverType, fieldName, fieldType, zeroValue string, namedStruct bool) *structField { - return &structField{ - sortVal: strings.ToLower(receiverType) + "." + strings.ToLower(fieldName), - ReceiverVar: strings.ToLower(receiverType[:1]), - ReceiverType: receiverType, - FieldName: fieldName, - FieldType: fieldType, - ZeroValue: zeroValue, - NamedStruct: namedStruct, - } -} - -func logf(fmt string, args ...interface{}) { - if *verbose { - log.Printf(fmt, args...) - } -} - -const source = `// Copyright {{.Year}} The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by gen-stringify-tests; DO NOT EDIT. - -package {{ $package := .Package}}{{$package}} -{{with .Imports}} -import ( - {{- range . 
-}} - "{{.}}" - {{end -}} -) -{{end}} -func Float64(v float64) *float64 { return &v } -{{range $key, $value := .StructFields}} -func Test{{ $key }}_String(t *testing.T) { - v := {{ $key }}{ {{range .}}{{if .NamedStruct}} - {{ .FieldName }}: &{{ .FieldType }}{},{{else}} - {{ .FieldName }}: {{.ZeroValue}},{{end}}{{end}} - } - want := ` + "`" + `{{ $package }}.{{ $key }}{{ $slice := . }}{ -{{- range $ind, $val := .}}{{if .NamedStruct}}{{ .FieldName }}:{{ $package }}.{{ .FieldType }}{}{{else}}{{ .FieldName }}:{{ processZeroValue .ZeroValue }}{{end}}{{ isNotLast $ind $slice }}{{end}}}` + "`" + ` - if got := v.String(); got != want { - t.Errorf("{{ $key }}.String = %v, want %v", got, want) - } -} -{{end}} -` diff --git a/vendor/github.com/hashicorp/go-getter/.travis.yml b/vendor/github.com/hashicorp/go-getter/.travis.yml deleted file mode 100644 index 4fe9176aab..0000000000 --- a/vendor/github.com/hashicorp/go-getter/.travis.yml +++ /dev/null @@ -1,24 +0,0 @@ -sudo: false - -addons: - apt: - sources: - - sourceline: 'ppa:git-core/ppa' - packages: - - git - -language: go - -os: - - linux - - osx - -go: - - "1.11.x" - -before_script: - - go build ./cmd/go-getter - -branches: - only: - - master diff --git a/vendor/github.com/hashicorp/go-getter/README.md b/vendor/github.com/hashicorp/go-getter/README.md index 3de23c7094..bbcd15de96 100644 --- a/vendor/github.com/hashicorp/go-getter/README.md +++ b/vendor/github.com/hashicorp/go-getter/README.md @@ -1,10 +1,10 @@ # go-getter -[![Build Status](http://img.shields.io/travis/hashicorp/go-getter.svg?style=flat-square)][travis] +[![CircleCI](https://circleci.com/gh/hashicorp/go-getter/tree/master.svg?style=svg)][circleci] [![Build status](https://ci.appveyor.com/api/projects/status/ulq3qr43n62croyq/branch/master?svg=true)][appveyor] [![Go Documentation](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)][godocs] -[travis]: http://travis-ci.org/hashicorp/go-getter +[circleci]: https://circleci.com/gh/hashicorp/go-getter/tree/master [godocs]: http://godoc.org/github.com/hashicorp/go-getter [appveyor]: https://ci.appveyor.com/project/hashicorp/go-getter/branch/master @@ -356,3 +356,7 @@ In order to access to GCS, authentication credentials should be provided. More i - gcs::https://www.googleapis.com/storage/v1/bucket - gcs::https://www.googleapis.com/storage/v1/bucket/foo.zip - www.googleapis.com/storage/v1/bucket/foo + +#### GCS Testing + +The tests for `get_gcs.go` require you to have GCP credentials set in your environment. These credentials can have any level of permissions to any project, they just need to exist. This means setting `GOOGLE_APPLICATION_CREDENTIALS="~/path/to/credentials.json"` or `GOOGLE_CREDENTIALS="{stringified-credentials-json}"`. Due to this configuration, `get_gcs_test.go` will fail for external contributors in CircleCI. diff --git a/vendor/github.com/hashicorp/go-getter/client.go b/vendor/github.com/hashicorp/go-getter/client.go index 007a78ba7c..38fb43b8f5 100644 --- a/vendor/github.com/hashicorp/go-getter/client.go +++ b/vendor/github.com/hashicorp/go-getter/client.go @@ -19,7 +19,7 @@ import ( // Using a client directly allows more fine-grained control over how downloading // is done, as well as customizing the protocols supported. type Client struct { - // Ctx for cancellation + // Ctx for cancellation Ctx context.Context // Src is the source URL to get. 
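The hunk above only re-indents the comment on the go-getter `Client.Ctx` field, but since that field is the cancellation hook the comment describes, a minimal usage sketch may help reviewers. This is not taken from the upstream docs; it assumes the go-getter `Client` fields (`Ctx`, `Src`, `Dst`, `Mode`) keep their usual meaning, and the source URL and destination path are hypothetical.

```go
package main

import (
    "context"
    "log"
    "time"

    getter "github.com/hashicorp/go-getter"
)

func main() {
    // Cancel the fetch if it takes longer than 30 seconds.
    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
    defer cancel()

    client := &getter.Client{
        Ctx:  ctx,                              // "Ctx for cancellation", per the field comment above
        Src:  "github.com/hashicorp/go-getter", // hypothetical source
        Dst:  "./tmp/go-getter-src",            // hypothetical destination
        Mode: getter.ClientModeDir,
    }
    if err := client.Get(); err != nil {
        log.Fatal(err)
    }
}
```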
diff --git a/vendor/github.com/hashicorp/go-getter/get_git.go b/vendor/github.com/hashicorp/go-getter/get_git.go index bb1ec316d3..1b9f4be819 100644 --- a/vendor/github.com/hashicorp/go-getter/get_git.go +++ b/vendor/github.com/hashicorp/go-getter/get_git.go @@ -1,6 +1,7 @@ package getter import ( + "bytes" "context" "encoding/base64" "fmt" @@ -9,6 +10,7 @@ import ( "os" "os/exec" "path/filepath" + "regexp" "runtime" "strconv" "strings" @@ -24,6 +26,8 @@ type GitGetter struct { getter } +var defaultBranchRegexp = regexp.MustCompile(`\s->\sorigin/(.*)`) + func (g *GitGetter) ClientMode(_ *url.URL) (ClientMode, error) { return ClientModeDir, nil } @@ -182,10 +186,10 @@ func (g *GitGetter) update(ctx context.Context, dst, sshKeyFile, ref string, dep cmd.Dir = dst if getRunCommand(cmd) != nil { - // Not a branch, switch to master. This will also catch non-existent - // branches, in which case we want to switch to master and then - // checkout the proper branch later. - ref = "master" + // Not a branch, switch to default branch. This will also catch + // non-existent branches, in which case we want to switch to default + // and then checkout the proper branch later. + ref = findDefaultBranch(dst) } // We have to be on a branch to pull @@ -216,6 +220,22 @@ func (g *GitGetter) fetchSubmodules(ctx context.Context, dst, sshKeyFile string, return getRunCommand(cmd) } +// findDefaultBranch checks the repo's origin remote for its default branch +// (generally "master"). "master" is returned if an origin default branch +// can't be determined. +func findDefaultBranch(dst string) string { + var stdoutbuf bytes.Buffer + cmd := exec.Command("git", "branch", "-r", "--points-at", "refs/remotes/origin/HEAD") + cmd.Dir = dst + cmd.Stdout = &stdoutbuf + err := cmd.Run() + matches := defaultBranchRegexp.FindStringSubmatch(stdoutbuf.String()) + if err != nil || matches == nil { + return "master" + } + return matches[len(matches)-1] +} + // setupGitEnv sets up the environment for the given command. This is used to // pass configuration data to git and ssh and enables advanced cloning methods. func setupGitEnv(cmd *exec.Cmd, sshKeyFile string) { diff --git a/vendor/github.com/hashicorp/go-getter/get_http.go b/vendor/github.com/hashicorp/go-getter/get_http.go index 7c4541c6e9..9ffdba78a5 100644 --- a/vendor/github.com/hashicorp/go-getter/get_http.go +++ b/vendor/github.com/hashicorp/go-getter/get_http.go @@ -9,7 +9,6 @@ import ( "net/url" "os" "path/filepath" - "strconv" "strings" safetemp "github.com/hashicorp/go-safetemp" @@ -88,7 +87,10 @@ func (g *HttpGetter) Get(dst string, u *url.URL) error { return err } - req.Header = g.Header + if g.Header != nil { + req.Header = g.Header + } + resp, err := g.Client.Do(req) if err != nil { return err @@ -128,6 +130,12 @@ func (g *HttpGetter) Get(dst string, u *url.URL) error { return g.getSubdir(ctx, dst, source, subDir) } +// GetFile fetches the file from src and stores it at dst. +// If the server supports Accept-Range, HttpGetter will attempt a range +// request. This means it is the caller's responsibility to ensure that an +// older version of the destination file does not exist, else it will be either +// falsely identified as being replaced, or corrupted with extra bytes +// appended. 
func (g *HttpGetter) GetFile(dst string, src *url.URL) error { ctx := g.Context() if g.Netrc { @@ -136,7 +144,6 @@ func (g *HttpGetter) GetFile(dst string, src *url.URL) error { return err } } - // Create all the parent directories if needed if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { return err @@ -165,18 +172,17 @@ func (g *HttpGetter) GetFile(dst string, src *url.URL) error { req.Header = g.Header } headResp, err := g.Client.Do(req) - if err == nil && headResp != nil { + if err == nil { headResp.Body.Close() if headResp.StatusCode == 200 { // If the HEAD request succeeded, then attempt to set the range // query if we can. - if headResp.Header.Get("Accept-Ranges") == "bytes" { + if headResp.Header.Get("Accept-Ranges") == "bytes" && headResp.ContentLength >= 0 { if fi, err := f.Stat(); err == nil { - if _, err = f.Seek(0, os.SEEK_END); err == nil { - req.Header.Set("Range", fmt.Sprintf("bytes=%d-", fi.Size())) + if _, err = f.Seek(0, io.SeekEnd); err == nil { currentFileSize = fi.Size() - totalFileSize, _ := strconv.ParseInt(headResp.Header.Get("Content-Length"), 10, 64) - if currentFileSize >= totalFileSize { + req.Header.Set("Range", fmt.Sprintf("bytes=%d-", currentFileSize)) + if currentFileSize >= headResp.ContentLength { // file already present return nil } diff --git a/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md b/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md index ccb46bbd8a..4c644fcfb1 100644 --- a/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md +++ b/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md @@ -1,5 +1,38 @@ # HCL Changelog +## v2.3.0 (Jan 3, 2020) + +### Enhancements + +* ext/tryfunc: Optional functions `try` and `can` to include in your `hcl.EvalContext` when evaluating expressions, which allow users to make decisions based on the success of expressions. ([#330](https://github.com/hashicorp/hcl/pull/330)) +* ext/typeexpr: Now has an optional function `convert` which you can include in your `hcl.EvalContext` when evaluating expressions, allowing users to convert values to specific type constraints using the type constraint expression syntax. ([#330](https://github.com/hashicorp/hcl/pull/330)) +* ext/typeexpr: A new `cty` capsule type `typeexpr.TypeConstraintType` which, when used as either a type constraint for a function parameter or as a type constraint for a `hcldec` attribute specification will cause the given expression to be interpreted as a type constraint expression rather than a value expression. ([#330](https://github.com/hashicorp/hcl/pull/330)) +* ext/customdecode: An optional extension that allows overriding the static decoding behavior for expressions either in function arguments or `hcldec` attribute specifications. ([#330](https://github.com/hashicorp/hcl/pull/330)) +* ext/customdecode: New `cty` capsuletypes `customdecode.ExpressionType` and `customdecode.ExpressionClosureType` which, when used as either a type constraint for a function parameter or as a type constraint for a `hcldec` attribute specification will cause the given expression (and, for the closure type, also the `hcl.EvalContext` it was evaluated in) to be captured for later analysis, rather than immediately evaluated. 
([#330](https://github.com/hashicorp/hcl/pull/330))
+
+## v2.2.0 (Dec 11, 2019)
+
+### Enhancements
+
+* hcldec: Attribute evaluation (as part of `AttrSpec` or `BlockAttrsSpec`) now captures expression evaluation metadata in any errors it produces during type conversions, allowing for better feedback in calling applications that are able to make use of this metadata when printing diagnostic messages. ([#329](https://github.com/hashicorp/hcl/pull/329))
+
+### Bugs Fixed
+
+* hclsyntax: `IndexExpr`, `SplatExpr`, and `RelativeTraversalExpr` will now report a source range that covers all of their child expression nodes. Previously they would report only the operator part, such as `["foo"]`, `[*]`, or `.foo`, which was problematic for callers using source ranges for code analysis. ([#328](https://github.com/hashicorp/hcl/pull/328))
+* hclwrite: Parser will no longer panic when the input includes index, splat, or relative traversal syntax. ([#328](https://github.com/hashicorp/hcl/pull/328))
+
+## v2.1.0 (Nov 19, 2019)
+
+### Enhancements
+
+* gohcl: When decoding into a struct value with some fields already populated, those values will be retained if not explicitly overwritten in the given HCL body, with similar overriding/merging behavior as `json.Unmarshal` in the Go standard library.
+* hclwrite: New interface to set the expression for an attribute to be a raw token sequence, with no special processing. This has some caveats, so if you intend to use it please refer to the godoc comments. ([#320](https://github.com/hashicorp/hcl/pull/320))
+
+### Bugs Fixed
+
+* hclwrite: The `Body.Blocks` method was returning the blocks in an undefined order, rather than preserving the order of declaration in the source input. ([#313](https://github.com/hashicorp/hcl/pull/313))
+* hclwrite: The `TokensForTraversal` function (and thus in turn the `Body.SetAttributeTraversal` method) was not correctly handling index steps in traversals, and thus producing invalid results. ([#319](https://github.com/hashicorp/hcl/pull/319))
+
 ## v2.0.0 (Oct 2, 2019)
 
 Initial release of HCL 2, which is a new implementation combining the HCL 1
diff --git a/vendor/github.com/hashicorp/hcl/v2/README.md b/vendor/github.com/hashicorp/hcl/v2/README.md
index d807a42456..3d0d509d53 100644
--- a/vendor/github.com/hashicorp/hcl/v2/README.md
+++ b/vendor/github.com/hashicorp/hcl/v2/README.md
@@ -8,7 +8,7 @@ towards devops tools, servers, etc.
 > **NOTE:** This is major version 2 of HCL, whose Go API is incompatible with
 > major version 1. Both versions are available for selection in Go Modules
 > projects. HCL 2 _cannot_ be imported from Go projects that are not using Go Modules. For more information, see
-> [our version selection guide](https://github.com/golang/go/wiki/Version-Selection).
+> [our version selection guide](https://github.com/hashicorp/hcl/wiki/Version-Selection).
 
 HCL has both a _native syntax_, intended to be pleasant to read and write for
 humans, and a JSON-based variant that is easier for machines to generate
@@ -51,7 +51,8 @@ func main() {
 ```
 
 A lower-level API is available for applications that need more control over
-the parsing, decoding, and evaluation of configuration.
+the parsing, decoding, and evaluation of configuration. For more information,
+see [the package documentation](https://pkg.go.dev/github.com/hashicorp/hcl/v2).
 
 ## Why?
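The README hunk above points readers at a lower-level API for parsing, decoding, and evaluation without showing it. As a rough sketch (not part of the vendored README), this is approximately what that flow looks like using the `hclparse` and `hcldec` packages shipped with the vendored HCL 2; the `io_mode` attribute and source text are made up for illustration.

```go
package main

import (
    "fmt"
    "log"

    "github.com/hashicorp/hcl/v2/hcldec"
    "github.com/hashicorp/hcl/v2/hclparse"
    "github.com/zclconf/go-cty/cty"
)

func main() {
    src := []byte(`io_mode = "async"`) // hypothetical configuration

    // Step 1: parse the source into an hcl.File.
    parser := hclparse.NewParser()
    file, diags := parser.ParseHCL(src, "example.hcl")
    if diags.HasErrors() {
        log.Fatal(diags)
    }

    // Step 2: decode against an explicit spec instead of gohcl struct tags.
    spec := &hcldec.AttrSpec{Name: "io_mode", Type: cty.String, Required: true}
    val, diags := hcldec.Decode(file.Body, spec, nil) // nil EvalContext: no variables or functions
    if diags.HasErrors() {
        log.Fatal(diags)
    }
    fmt.Println(val.AsString()) // prints: async
}
```

Keeping parsing and decoding as separate steps is also what lets the customdecode extension, vendored later in this diff, intercept decoding of individual attributes.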
@@ -156,9 +157,9 @@ syntax allows use of arbitrary expressions within JSON strings: For more information, see the detailed specifications: -* [Syntax-agnostic Information Model](hcl/spec.md) -* [HCL Native Syntax](hcl/hclsyntax/spec.md) -* [JSON Representation](hcl/json/spec.md) +* [Syntax-agnostic Information Model](spec.md) +* [HCL Native Syntax](hclsyntax/spec.md) +* [JSON Representation](json/spec.md) ## Changes in 2.0 diff --git a/vendor/github.com/hashicorp/hcl/v2/appveyor.yml b/vendor/github.com/hashicorp/hcl/v2/appveyor.yml new file mode 100644 index 0000000000..e382f8f571 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/v2/appveyor.yml @@ -0,0 +1,13 @@ +build: off + +clone_folder: c:\gopath\src\github.com\hashicorp\hcl + +environment: + GOPATH: c:\gopath + GO111MODULE: on + GOPROXY: https://goproxy.io + +stack: go 1.12 + +test_script: + - go test ./... diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/README.md b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/README.md new file mode 100644 index 0000000000..1636f577a0 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/README.md @@ -0,0 +1,209 @@ +# HCL Custom Static Decoding Extension + +This HCL extension provides a mechanism for defining arguments in an HCL-based +language whose values are derived using custom decoding rules against the +HCL expression syntax, overriding the usual behavior of normal expression +evaluation. + +"Arguments", for the purpose of this extension, currently includes the +following two contexts: + +* For applications using `hcldec` for dynamic decoding, a `hcldec.AttrSpec` + or `hcldec.BlockAttrsSpec` can be given a special type constraint that + opts in to custom decoding behavior for the attribute(s) that are selected + by that specification. + +* When working with the HCL native expression syntax, a function given in + the `hcl.EvalContext` during evaluation can have parameters with special + type constraints that opt in to custom decoding behavior for the argument + expression associated with that parameter in any call. + +The above use-cases are rather abstract, so we'll consider a motivating +real-world example: sometimes we (language designers) need to allow users +to specify type constraints directly in the language itself, such as in +[Terraform's Input Variables](https://www.terraform.io/docs/configuration/variables.html). +Terraform's `variable` blocks include an argument called `type` which takes +a type constraint given using HCL expression building-blocks as defined by +[the HCL `typeexpr` extension](../typeexpr/README.md). + +A "type constraint expression" of that sort is not an expression intended to +be evaluated in the usual way. Instead, the physical expression is +deconstructed using [the static analysis operations](../../spec.md#static-analysis) +to produce a `cty.Type` as the result, rather than a `cty.Value`. + +The purpose of this Custom Static Decoding Extension, then, is to provide a +bridge to allow that sort of custom decoding to be used via mechanisms that +normally deal in `cty.Value`, such as `hcldec` and native syntax function +calls as listed above. + +(Note: [`gohcl`](https://pkg.go.dev/github.com/hashicorp/hcl/v2/gohcl) has +its own mechanism to support this use case, exploiting the fact that it is +working directly with "normal" Go types. Decoding into a struct field of +type `hcl.Expression` obtains the expression directly without evaluating it +first. 
The Custom Static Decoding Extension is not necessary for that `gohcl`
+technique. You can also implement custom decoding by working directly with
+the lowest-level HCL API, which separates extraction of and evaluation of
+expressions into two steps.)
+
+## Custom Decoding Types
+
+This extension relies on a convention implemented in terms of
+[_Capsule Types_ in the underlying `cty` type system](https://github.com/zclconf/go-cty/blob/master/docs/types.md#capsule-types). `cty` allows a capsule type to carry arbitrary
+extension metadata values as an aid to creating higher-level abstractions like
+this extension.
+
+A custom argument decoding mode, then, is implemented by creating a new `cty`
+capsule type that implements the `ExtensionData` custom operation to return
+a decoding function when requested. For example:
+
+```go
+var keywordType cty.Type
+keywordType = cty.CapsuleWithOps("keyword", reflect.TypeOf(""), &cty.CapsuleOps{
+    ExtensionData: func(key interface{}) interface{} {
+        switch key {
+        case customdecode.CustomExpressionDecoder:
+            return customdecode.CustomExpressionDecoderFunc(
+                func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+                    var diags hcl.Diagnostics
+                    kw := hcl.ExprAsKeyword(expr)
+                    if kw == "" {
+                        diags = append(diags, &hcl.Diagnostic{
+                            Severity: hcl.DiagError,
+                            Summary:  "Invalid keyword",
+                            Detail:   "A keyword is required",
+                            Subject:  expr.Range().Ptr(),
+                        })
+                        return cty.UnknownVal(keywordType), diags
+                    }
+                    return cty.CapsuleVal(keywordType, &kw), nil
+                },
+            )
+        default:
+            return nil
+        }
+    },
+})
+```
+
+The boilerplate here is a bit fussy, but the important part for our purposes
+is the `case customdecode.CustomExpressionDecoder:` clause, which uses
+a custom extension key type defined in this package to recognize when a
+component implementing this extension is checking to see if a target type
+has a custom decode implementation.
+
+In the above case we've defined a type that decodes expressions as static
+keywords, so a keyword like `foo` would decode as an encapsulated `"foo"`
+string, while any other sort of expression like `"baz"` or `1 + 1` would
+return an error.
+
+We could then use `keywordType` as a type constraint either for a function
+parameter or a `hcldec` attribute specification, which would require the
+argument for that function parameter or the expression for the matching
+attributes to be a static keyword, rather than an arbitrary expression.
+For example, in a `hcldec.AttrSpec`:
+
+```go
+keywordSpec := &hcldec.AttrSpec{
+    Name: "keyword",
+    Type: keywordType,
+}
+```
+
+The above would accept input like the following and would set its result to
+a `cty.Value` of `keywordType`, after decoding:
+
+```hcl
+keyword = foo
+```
+
+## The Expression and Expression Closure `cty` types
+
+Building on the above, this package also includes two capsule types that use
+the above mechanism to allow calling applications to capture expressions
+directly and thus defer analysis to a later step, after initial decoding.
+
+The `customdecode.ExpressionType` type encapsulates an `hcl.Expression` alone,
+for situations like our type constraint expression example above where it's
+the static structure of the expression we want to inspect, and thus any
+variables and functions defined in the evaluation context are irrelevant.
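To make the plain `ExpressionType` case concrete, here is a small sketch (not from the vendored README) that captures an attribute's expression for static analysis via `hcldec`. It assumes the `hclparse` parser and the customdecode-aware `hcldec` changes vendored later in this diff; the `rule` attribute and source text are hypothetical.

```go
package main

import (
    "fmt"
    "log"

    "github.com/hashicorp/hcl/v2/ext/customdecode"
    "github.com/hashicorp/hcl/v2/hcldec"
    "github.com/hashicorp/hcl/v2/hclparse"
)

func main() {
    src := []byte(`rule = upper(name)`) // hypothetical input

    parser := hclparse.NewParser()
    file, diags := parser.ParseHCL(src, "example.hcl")
    if diags.HasErrors() {
        log.Fatal(diags)
    }

    // ExpressionType opts this attribute into custom decoding: the expression
    // is captured as-is instead of being evaluated.
    spec := &hcldec.AttrSpec{Name: "rule", Type: customdecode.ExpressionType, Required: true}
    val, diags := hcldec.Decode(file.Body, spec, nil)
    if diags.HasErrors() {
        log.Fatal(diags)
    }

    expr := customdecode.ExpressionFromVal(val)
    for _, traversal := range expr.Variables() {
        fmt.Println("references variable:", traversal.RootName())
    }
}
```

Because the expression is only captured, never evaluated, the nil EvalContext here is fine; deferred evaluation is the `ExpressionClosureType` case described next.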
+ +The `customdecode.ExpressionClosureType` type encapsulates a +`*customdecode.ExpressionClosure` value, which binds the given expression to +the `hcl.EvalContext` it was asked to evaluate against and thus allows the +receiver of that result to later perform normal evaluation of the expression +with all the same variables and functions that would've been available to it +naturally. + +Both of these types can be used as type constraints either for `hcldec` +attribute specifications or for function arguments. Here's an example of +`ExpressionClosureType` to implement a function that can evaluate +an expression with some additional variables defined locally, which we'll +call the `with(...)` function: + +```go +var WithFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "variables", + Type: cty.DynamicPseudoType, + }, + { + Name: "expression", + Type: customdecode.ExpressionClosureType, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + varsVal := args[0] + exprVal := args[1] + if !varsVal.Type().IsObjectType() { + return cty.NilVal, function.NewArgErrorf(0, "must be an object defining local variables") + } + if !varsVal.IsKnown() { + // We can't predict our result type until the variables object + // is known. + return cty.DynamicPseudoType, nil + } + vars := varsVal.AsValueMap() + closure := customdecode.ExpressionClosureFromVal(exprVal) + result, err := evalWithLocals(vars, closure) + if err != nil { + return cty.NilVal, err + } + return result.Type(), nil + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + varsVal := args[0] + exprVal := args[1] + vars := varsVal.AsValueMap() + closure := customdecode.ExpressionClosureFromVal(exprVal) + return evalWithLocals(vars, closure) + }, +}) + +func evalWithLocals(locals map[string]cty.Value, closure *customdecode.ExpressionClosure) (cty.Value, error) { + childCtx := closure.EvalContext.NewChild() + childCtx.Variables = locals + val, diags := closure.Expression.Value(childCtx) + if diags.HasErrors() { + return cty.NilVal, function.NewArgErrorf(1, "couldn't evaluate expression: %s", diags.Error()) + } + return val, nil +} +``` + +If the above function were placed into an `hcl.EvalContext` as `with`, it +could be used in a native syntax call to that function as follows: + +```hcl + foo = with({name = "Cory"}, "${greeting}, ${name}!") +``` + +The above assumes a variable in the main context called `greeting`, to which +the `with` function adds `name` before evaluating the expression given in +its second argument. This makes that second argument context-sensitive -- it +would behave differently if the user wrote the same thing somewhere else -- so +this capability should be used with care to make sure it doesn't cause confusion +for the end-users of your language. + +There are some other examples of this capability to evaluate expressions in +unusual ways in the `tryfunc` directory that is a sibling of this one. diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/customdecode.go b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/customdecode.go new file mode 100644 index 0000000000..c9d7a1efb2 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/customdecode.go @@ -0,0 +1,56 @@ +// Package customdecode contains a HCL extension that allows, in certain +// contexts, expression evaluation to be overridden by custom static analysis. 
+// +// This mechanism is only supported in certain specific contexts where +// expressions are decoded with a specific target type in mind. For more +// information, see the documentation on CustomExpressionDecoder. +package customdecode + +import ( + "github.com/hashicorp/hcl/v2" + "github.com/zclconf/go-cty/cty" +) + +type customDecoderImpl int + +// CustomExpressionDecoder is a value intended to be used as a cty capsule +// type ExtensionData key for capsule types whose values are to be obtained +// by static analysis of an expression rather than normal evaluation of that +// expression. +// +// When a cooperating capsule type is asked for ExtensionData with this key, +// it must return a non-nil CustomExpressionDecoderFunc value. +// +// This mechanism is not universally supported; instead, it's handled in a few +// specific places where expressions are evaluated with the intent of producing +// a cty.Value of a type given by the calling application. +// +// Specifically, this currently works for type constraints given in +// hcldec.AttrSpec and hcldec.BlockAttrsSpec, and it works for arguments to +// function calls in the HCL native syntax. HCL extensions implemented outside +// of the main HCL module may also implement this; consult their own +// documentation for details. +const CustomExpressionDecoder = customDecoderImpl(1) + +// CustomExpressionDecoderFunc is the type of value that must be returned by +// a capsule type handling the key CustomExpressionDecoder in its ExtensionData +// implementation. +// +// If no error diagnostics are returned, the result value MUST be of the +// capsule type that the decoder function was derived from. If the returned +// error diagnostics prevent producing a value at all, return cty.NilVal. +type CustomExpressionDecoderFunc func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) + +// CustomExpressionDecoderForType takes any cty type and returns its +// custom expression decoder implementation if it has one. If it is not a +// capsule type or it does not implement a custom expression decoder, this +// function returns nil. +func CustomExpressionDecoderForType(ty cty.Type) CustomExpressionDecoderFunc { + if !ty.IsCapsuleType() { + return nil + } + if fn, ok := ty.CapsuleExtensionData(CustomExpressionDecoder).(CustomExpressionDecoderFunc); ok { + return fn + } + return nil +} diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/expression_type.go b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/expression_type.go new file mode 100644 index 0000000000..af7c66c235 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/expression_type.go @@ -0,0 +1,146 @@ +package customdecode + +import ( + "fmt" + "reflect" + + "github.com/hashicorp/hcl/v2" + "github.com/zclconf/go-cty/cty" +) + +// ExpressionType is a cty capsule type that carries hcl.Expression values. +// +// This type implements custom decoding in the most general way possible: it +// just captures whatever expression is given to it, with no further processing +// whatsoever. It could therefore be useful in situations where an application +// must defer processing of the expression content until a later step. +// +// ExpressionType only captures the expression, not the evaluation context it +// was destined to be evaluated in. 
That means this type can be fine for +// situations where the recipient of the value only intends to do static +// analysis, but ExpressionClosureType is more appropriate in situations where +// the recipient will eventually evaluate the given expression. +var ExpressionType cty.Type + +// ExpressionVal returns a new cty value of type ExpressionType, wrapping the +// given expression. +func ExpressionVal(expr hcl.Expression) cty.Value { + return cty.CapsuleVal(ExpressionType, &expr) +} + +// ExpressionFromVal returns the expression encapsulated in the given value, or +// panics if the value is not a known value of ExpressionType. +func ExpressionFromVal(v cty.Value) hcl.Expression { + if !v.Type().Equals(ExpressionType) { + panic("value is not of ExpressionType") + } + ptr := v.EncapsulatedValue().(*hcl.Expression) + return *ptr +} + +// ExpressionClosureType is a cty capsule type that carries hcl.Expression +// values along with their original evaluation contexts. +// +// This is similar to ExpressionType except that during custom decoding it +// also captures the hcl.EvalContext that was provided, allowing callers to +// evaluate the expression later in the same context where it would originally +// have been evaluated, or a context derived from that one. +var ExpressionClosureType cty.Type + +// ExpressionClosure is the type encapsulated in ExpressionClosureType +type ExpressionClosure struct { + Expression hcl.Expression + EvalContext *hcl.EvalContext +} + +// ExpressionClosureVal returns a new cty value of type ExpressionClosureType, +// wrapping the given expression closure. +func ExpressionClosureVal(closure *ExpressionClosure) cty.Value { + return cty.CapsuleVal(ExpressionClosureType, closure) +} + +// Value evaluates the closure's expression using the closure's EvalContext, +// returning the result. +func (c *ExpressionClosure) Value() (cty.Value, hcl.Diagnostics) { + return c.Expression.Value(c.EvalContext) +} + +// ExpressionClosureFromVal returns the expression closure encapsulated in the +// given value, or panics if the value is not a known value of +// ExpressionClosureType. +// +// The caller MUST NOT modify the returned closure or the EvalContext inside +// it. To derive a new EvalContext, either create a child context or make +// a copy. +func ExpressionClosureFromVal(v cty.Value) *ExpressionClosure { + if !v.Type().Equals(ExpressionClosureType) { + panic("value is not of ExpressionClosureType") + } + return v.EncapsulatedValue().(*ExpressionClosure) +} + +func init() { + // Getting hold of a reflect.Type for hcl.Expression is a bit tricky because + // it's an interface type, but we can do it with some indirection. 
+ goExpressionType := reflect.TypeOf((*hcl.Expression)(nil)).Elem() + + ExpressionType = cty.CapsuleWithOps("expression", goExpressionType, &cty.CapsuleOps{ + ExtensionData: func(key interface{}) interface{} { + switch key { + case CustomExpressionDecoder: + return CustomExpressionDecoderFunc( + func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { + return ExpressionVal(expr), nil + }, + ) + default: + return nil + } + }, + TypeGoString: func(_ reflect.Type) string { + return "customdecode.ExpressionType" + }, + GoString: func(raw interface{}) string { + exprPtr := raw.(*hcl.Expression) + return fmt.Sprintf("customdecode.ExpressionVal(%#v)", *exprPtr) + }, + RawEquals: func(a, b interface{}) bool { + aPtr := a.(*hcl.Expression) + bPtr := b.(*hcl.Expression) + return reflect.DeepEqual(*aPtr, *bPtr) + }, + }) + ExpressionClosureType = cty.CapsuleWithOps("expression closure", reflect.TypeOf(ExpressionClosure{}), &cty.CapsuleOps{ + ExtensionData: func(key interface{}) interface{} { + switch key { + case CustomExpressionDecoder: + return CustomExpressionDecoderFunc( + func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { + return ExpressionClosureVal(&ExpressionClosure{ + Expression: expr, + EvalContext: ctx, + }), nil + }, + ) + default: + return nil + } + }, + TypeGoString: func(_ reflect.Type) string { + return "customdecode.ExpressionClosureType" + }, + GoString: func(raw interface{}) string { + closure := raw.(*ExpressionClosure) + return fmt.Sprintf("customdecode.ExpressionClosureVal(%#v)", closure) + }, + RawEquals: func(a, b interface{}) bool { + closureA := a.(*ExpressionClosure) + closureB := b.(*ExpressionClosure) + // The expression itself compares by deep equality, but EvalContexts + // conventionally compare by pointer identity, so we'll comply + // with both conventions here by testing them separately. + return closureA.EvalContext == closureB.EvalContext && + reflect.DeepEqual(closureA.Expression, closureB.Expression) + }, + }) +} diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/README.md b/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/README.md index ec70947028..058f1e3d84 100644 --- a/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/README.md +++ b/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/README.md @@ -65,3 +65,71 @@ type checking it will be one that has identifiers as its attributes; object types with weird attributes generally show up only from arbitrary object constructors in configuration files, which are usually treated either as maps or as the dynamic pseudo-type. + +## Type Constraints as Values + +Along with defining a convention for writing down types using HCL expression +constructs, this package also includes a mechanism for representing types as +values that can be used as data within an HCL-based language. + +`typeexpr.TypeConstraintType` is a +[`cty` capsule type](https://github.com/zclconf/go-cty/blob/master/docs/types.md#capsule-types) +that encapsulates `cty.Type` values. 
You can construct such a value directly +using the `TypeConstraintVal` function: + +```go +tyVal := typeexpr.TypeConstraintVal(cty.String) + +// We can unpack the type from a value using TypeConstraintFromVal +ty := typeExpr.TypeConstraintFromVal(tyVal) +``` + +However, the primary purpose of `typeexpr.TypeConstraintType` is to be +specified as the type constraint for an argument, in which case it serves +as a signal for HCL to treat the argument expression as a type constraint +expression as defined above, rather than as a normal value expression. + +"An argument" in the above in practice means the following two locations: + +* As the type constraint for a parameter of a cty function that will be + used in an `hcl.EvalContext`. In that case, function calls in the HCL + native expression syntax will require the argument to be valid type constraint + expression syntax and the function implementation will receive a + `TypeConstraintType` value as the argument value for that parameter. + +* As the type constraint for a `hcldec.AttrSpec` or `hcldec.BlockAttrsSpec` + when decoding an HCL body using `hcldec`. In that case, the attributes + with that type constraint will be required to be valid type constraint + expression syntax and the result will be a `TypeConstraintType` value. + +Note that the special handling of these arguments means that an argument +marked in this way must use the type constraint syntax directly. It is not +valid to pass in a value of `TypeConstraintType` that has been obtained +dynamically via some other expression result. + +`TypeConstraintType` is provided with the intent of using it internally within +application code when incorporating type constraint expression syntax into +an HCL-based language, not to be used for dynamic "programming with types". A +calling application could support programming with types by defining its _own_ +capsule type, but that is not the purpose of `TypeConstraintType`. + +## The "convert" `cty` Function + +Building on the `TypeConstraintType` described in the previous section, this +package also provides `typeexpr.ConvertFunc` which is a cty function that +can be placed into a `cty.EvalContext` (conventionally named "convert") in +order to provide a general type conversion function in an HCL-based language: + +```hcl + foo = convert("true", bool) +``` + +The second parameter uses the mechanism described in the previous section to +require its argument to be a type constraint expression rather than a value +expression. In doing so, it allows converting with any type constraint that +can be expressed in this package's type constraint syntax. In the above example, +the `foo` argument would receive a boolean true, or `cty.True` in `cty` terms. + +The target type constraint must always be provided statically using inline +type constraint syntax. There is no way to _dynamically_ select a type +constraint using this function. 
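As a follow-on to the section above (not part of the vendored README), a host application could expose this function roughly as follows, assuming the `hclsyntax` expression parser and the `typeexpr.ConvertFunc` value added in this vendor update; the expression text mirrors the HCL example above.

```go
package main

import (
    "fmt"
    "log"

    "github.com/hashicorp/hcl/v2"
    "github.com/hashicorp/hcl/v2/ext/typeexpr"
    "github.com/hashicorp/hcl/v2/hclsyntax"
    "github.com/zclconf/go-cty/cty/function"
)

func main() {
    // Parse a native-syntax expression that calls convert with a type
    // constraint as its second argument.
    expr, diags := hclsyntax.ParseExpression([]byte(`convert("true", bool)`), "example.hcl", hcl.InitialPos)
    if diags.HasErrors() {
        log.Fatal(diags)
    }

    ctx := &hcl.EvalContext{
        Functions: map[string]function.Function{
            "convert": typeexpr.ConvertFunc, // conventionally named "convert"
        },
    }
    val, diags := expr.Value(ctx)
    if diags.HasErrors() {
        log.Fatal(diags)
    }
    fmt.Println(val.True()) // prints: true
}
```

Because the second parameter is constrained to `TypeConstraintType`, the `bool` argument is decoded statically as a type constraint rather than evaluated as a variable reference, which is the customdecode behavior described earlier.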
diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/type_type.go b/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/type_type.go new file mode 100644 index 0000000000..5462d82c3c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/type_type.go @@ -0,0 +1,118 @@ +package typeexpr + +import ( + "fmt" + "reflect" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/ext/customdecode" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/convert" + "github.com/zclconf/go-cty/cty/function" +) + +// TypeConstraintType is a cty capsule type that allows cty type constraints to +// be used as values. +// +// If TypeConstraintType is used in a context supporting the +// customdecode.CustomExpressionDecoder extension then it will implement +// expression decoding using the TypeConstraint function, thus allowing +// type expressions to be used in contexts where value expressions might +// normally be expected, such as in arguments to function calls. +var TypeConstraintType cty.Type + +// TypeConstraintVal constructs a cty.Value whose type is +// TypeConstraintType. +func TypeConstraintVal(ty cty.Type) cty.Value { + return cty.CapsuleVal(TypeConstraintType, &ty) +} + +// TypeConstraintFromVal extracts the type from a cty.Value of +// TypeConstraintType that was previously constructed using TypeConstraintVal. +// +// If the given value isn't a known, non-null value of TypeConstraintType +// then this function will panic. +func TypeConstraintFromVal(v cty.Value) cty.Type { + if !v.Type().Equals(TypeConstraintType) { + panic("value is not of TypeConstraintType") + } + ptr := v.EncapsulatedValue().(*cty.Type) + return *ptr +} + +// ConvertFunc is a cty function that implements type conversions. +// +// Its signature is as follows: +// convert(value, type_constraint) +// +// ...where type_constraint is a type constraint expression as defined by +// typeexpr.TypeConstraint. +// +// It relies on HCL's customdecode extension and so it's not suitable for use +// in non-HCL contexts or if you are using a HCL syntax implementation that +// does not support customdecode for function arguments. However, it _is_ +// supported for function calls in the HCL native expression syntax. 
+var ConvertFunc function.Function + +func init() { + TypeConstraintType = cty.CapsuleWithOps("type constraint", reflect.TypeOf(cty.Type{}), &cty.CapsuleOps{ + ExtensionData: func(key interface{}) interface{} { + switch key { + case customdecode.CustomExpressionDecoder: + return customdecode.CustomExpressionDecoderFunc( + func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { + ty, diags := TypeConstraint(expr) + if diags.HasErrors() { + return cty.NilVal, diags + } + return TypeConstraintVal(ty), nil + }, + ) + default: + return nil + } + }, + TypeGoString: func(_ reflect.Type) string { + return "typeexpr.TypeConstraintType" + }, + GoString: func(raw interface{}) string { + tyPtr := raw.(*cty.Type) + return fmt.Sprintf("typeexpr.TypeConstraintVal(%#v)", *tyPtr) + }, + RawEquals: func(a, b interface{}) bool { + aPtr := a.(*cty.Type) + bPtr := b.(*cty.Type) + return (*aPtr).Equals(*bPtr) + }, + }) + + ConvertFunc = function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "value", + Type: cty.DynamicPseudoType, + AllowNull: true, + AllowDynamicType: true, + }, + { + Name: "type", + Type: TypeConstraintType, + }, + }, + Type: func(args []cty.Value) (cty.Type, error) { + wantTypePtr := args[1].EncapsulatedValue().(*cty.Type) + got, err := convert.Convert(args[0], *wantTypePtr) + if err != nil { + return cty.NilType, function.NewArgError(0, err) + } + return got.Type(), nil + }, + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + v, err := convert.Convert(args[0], retType) + if err != nil { + return cty.NilVal, function.NewArgError(0, err) + } + return v, nil + }, + }) +} diff --git a/vendor/github.com/hashicorp/hcl/v2/go.mod b/vendor/github.com/hashicorp/hcl/v2/go.mod index c152e6016f..d80c99d9b6 100644 --- a/vendor/github.com/hashicorp/hcl/v2/go.mod +++ b/vendor/github.com/hashicorp/hcl/v2/go.mod @@ -6,7 +6,7 @@ require ( github.com/apparentlymart/go-textseg v1.0.0 github.com/davecgh/go-spew v1.1.1 github.com/go-test/deep v1.0.3 - github.com/google/go-cmp v0.2.0 + github.com/google/go-cmp v0.3.1 github.com/kr/pretty v0.1.0 github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 @@ -14,7 +14,7 @@ require ( github.com/sergi/go-diff v1.0.0 github.com/spf13/pflag v1.0.2 github.com/stretchr/testify v1.2.2 // indirect - github.com/zclconf/go-cty v1.1.0 + github.com/zclconf/go-cty v1.2.0 golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734 golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82 // indirect golang.org/x/text v0.3.2 // indirect diff --git a/vendor/github.com/hashicorp/hcl/v2/go.sum b/vendor/github.com/hashicorp/hcl/v2/go.sum index b3b95415f8..76b135fb47 100644 --- a/vendor/github.com/hashicorp/hcl/v2/go.sum +++ b/vendor/github.com/hashicorp/hcl/v2/go.sum @@ -9,8 +9,8 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/kr/pretty 
v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -29,8 +29,8 @@ github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= -github.com/zclconf/go-cty v1.1.0 h1:uJwc9HiBOCpoKIObTQaLR+tsEXx1HBHnOsOOpcdhZgw= -github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= +github.com/zclconf/go-cty v1.2.0 h1:sPHsy7ADcIZQP3vILvTjrh74ZA175TFP5vqiNK1UmlI= +github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734 h1:p/H982KKEjUnLJkM3tt/LemDnOc1GiZL5FCVlORJ5zo= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= diff --git a/vendor/github.com/hashicorp/hcl/v2/gohcl/decode.go b/vendor/github.com/hashicorp/hcl/v2/gohcl/decode.go index 7ba08eee00..f0d589d777 100644 --- a/vendor/github.com/hashicorp/hcl/v2/gohcl/decode.go +++ b/vendor/github.com/hashicorp/hcl/v2/gohcl/decode.go @@ -147,7 +147,9 @@ func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) if len(blocks) == 0 { if isSlice || isPtr { - val.Field(fieldIdx).Set(reflect.Zero(field.Type)) + if val.Field(fieldIdx).IsNil() { + val.Field(fieldIdx).Set(reflect.Zero(field.Type)) + } } else { diags = append(diags, &hcl.Diagnostic{ Severity: hcl.DiagError, @@ -166,11 +168,20 @@ func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) if isPtr { elemType = reflect.PtrTo(ty) } - sli := reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks)) + sli := val.Field(fieldIdx) + if sli.IsNil() { + sli = reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks)) + } for i, block := range blocks { if isPtr { - v := reflect.New(ty) + if i >= sli.Len() { + sli = reflect.Append(sli, reflect.New(ty)) + } + v := sli.Index(i) + if v.IsNil() { + v = reflect.New(ty) + } diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...) sli.Index(i).Set(v) } else { @@ -178,12 +189,19 @@ func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) } } + if sli.Len() > len(blocks) { + sli.SetLen(len(blocks)) + } + val.Field(fieldIdx).Set(sli) default: block := blocks[0] if isPtr { - v := reflect.New(ty) + v := val.Field(fieldIdx) + if v.IsNil() { + v = reflect.New(ty) + } diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...) 
val.Field(fieldIdx).Set(v) } else { diff --git a/vendor/github.com/hashicorp/hcl/v2/hcldec/spec.go b/vendor/github.com/hashicorp/hcl/v2/hcldec/spec.go index 6f2d9732c6..a70818e1b5 100644 --- a/vendor/github.com/hashicorp/hcl/v2/hcldec/spec.go +++ b/vendor/github.com/hashicorp/hcl/v2/hcldec/spec.go @@ -6,6 +6,7 @@ import ( "sort" "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/ext/customdecode" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" "github.com/zclconf/go-cty/cty/function" @@ -193,6 +194,14 @@ func (s *AttrSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ct return cty.NullVal(s.Type), nil } + if decodeFn := customdecode.CustomExpressionDecoderForType(s.Type); decodeFn != nil { + v, diags := decodeFn(attr.Expr, ctx) + if v == cty.NilVal { + v = cty.UnknownVal(s.Type) + } + return v, diags + } + val, diags := attr.Expr.Value(ctx) convVal, err := convert.Convert(val, s.Type) @@ -204,8 +213,10 @@ func (s *AttrSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ct "Inappropriate value for attribute %q: %s.", s.Name, err.Error(), ), - Subject: attr.Expr.StartRange().Ptr(), - Context: hcl.RangeBetween(attr.NameRange, attr.Expr.StartRange()).Ptr(), + Subject: attr.Expr.Range().Ptr(), + Context: hcl.RangeBetween(attr.NameRange, attr.Expr.Range()).Ptr(), + Expression: attr.Expr, + EvalContext: ctx, }) // We'll return an unknown value of the _correct_ type so that the // incomplete result can still be used for some analysis use-cases. @@ -1221,16 +1232,29 @@ func (s *BlockAttrsSpec) decode(content *hcl.BodyContent, blockLabels []blockLab vals := make(map[string]cty.Value, len(attrs)) for name, attr := range attrs { + if decodeFn := customdecode.CustomExpressionDecoderForType(s.ElementType); decodeFn != nil { + attrVal, attrDiags := decodeFn(attr.Expr, ctx) + diags = append(diags, attrDiags...) + if attrVal == cty.NilVal { + attrVal = cty.UnknownVal(s.ElementType) + } + vals[name] = attrVal + continue + } + attrVal, attrDiags := attr.Expr.Value(ctx) diags = append(diags, attrDiags...) 
attrVal, err := convert.Convert(attrVal, s.ElementType) if err != nil { diags = append(diags, &hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: "Invalid attribute value", - Detail: fmt.Sprintf("Invalid value for attribute of %q block: %s.", s.TypeName, err), - Subject: attr.Expr.Range().Ptr(), + Severity: hcl.DiagError, + Summary: "Invalid attribute value", + Detail: fmt.Sprintf("Invalid value for attribute of %q block: %s.", s.TypeName, err), + Subject: attr.Expr.Range().Ptr(), + Context: hcl.RangeBetween(attr.NameRange, attr.Expr.Range()).Ptr(), + Expression: attr.Expr, + EvalContext: ctx, }) attrVal = cty.UnknownVal(s.ElementType) } diff --git a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go index 963ed77524..3fe84ddc38 100644 --- a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go +++ b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go @@ -5,6 +5,7 @@ import ( "sync" "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/ext/customdecode" "github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty/convert" "github.com/zclconf/go-cty/cty/function" @@ -350,26 +351,38 @@ func (e *FunctionCallExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnosti param = varParam } - val, argDiags := argExpr.Value(ctx) - if len(argDiags) > 0 { + var val cty.Value + if decodeFn := customdecode.CustomExpressionDecoderForType(param.Type); decodeFn != nil { + var argDiags hcl.Diagnostics + val, argDiags = decodeFn(argExpr, ctx) diags = append(diags, argDiags...) - } + if val == cty.NilVal { + val = cty.UnknownVal(param.Type) + } + } else { + var argDiags hcl.Diagnostics + val, argDiags = argExpr.Value(ctx) + if len(argDiags) > 0 { + diags = append(diags, argDiags...) + } - // Try to convert our value to the parameter type - val, err := convert.Convert(val, param.Type) - if err != nil { - diags = append(diags, &hcl.Diagnostic{ - Severity: hcl.DiagError, - Summary: "Invalid function argument", - Detail: fmt.Sprintf( - "Invalid value for %q parameter: %s.", - param.Name, err, - ), - Subject: argExpr.StartRange().Ptr(), - Context: e.Range().Ptr(), - Expression: argExpr, - EvalContext: ctx, - }) + // Try to convert our value to the parameter type + var err error + val, err = convert.Convert(val, param.Type) + if err != nil { + diags = append(diags, &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Invalid function argument", + Detail: fmt.Sprintf( + "Invalid value for %q parameter: %s.", + param.Name, err, + ), + Subject: argExpr.StartRange().Ptr(), + Context: e.Range().Ptr(), + Expression: argExpr, + EvalContext: ctx, + }) + } } argVals[i] = val @@ -615,8 +628,9 @@ type IndexExpr struct { Collection Expression Key Expression - SrcRange hcl.Range - OpenRange hcl.Range + SrcRange hcl.Range + OpenRange hcl.Range + BracketRange hcl.Range } func (e *IndexExpr) walkChildNodes(w internalWalkFunc) { @@ -631,7 +645,7 @@ func (e *IndexExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) { diags = append(diags, collDiags...) diags = append(diags, keyDiags...) - val, indexDiags := hcl.Index(coll, key, &e.SrcRange) + val, indexDiags := hcl.Index(coll, key, &e.BracketRange) setDiagEvalContext(indexDiags, e, ctx) diags = append(diags, indexDiags...) 
return val, diags diff --git a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression_vars_gen.go b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression_vars_gen.go deleted file mode 100644 index 6793771d42..0000000000 --- a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression_vars_gen.go +++ /dev/null @@ -1,99 +0,0 @@ -// This is a 'go generate'-oriented program for producing the "Variables" -// method on every Expression implementation found within this package. -// All expressions share the same implementation for this method, which -// just wraps the package-level function "Variables" and uses an AST walk -// to do its work. - -// +build ignore - -package main - -import ( - "fmt" - "go/ast" - "go/parser" - "go/token" - "os" - "sort" -) - -func main() { - fs := token.NewFileSet() - pkgs, err := parser.ParseDir(fs, ".", nil, 0) - if err != nil { - fmt.Fprintf(os.Stderr, "error while parsing: %s\n", err) - os.Exit(1) - } - pkg := pkgs["hclsyntax"] - - // Walk all the files and collect the receivers of any "Value" methods - // that look like they are trying to implement Expression. - var recvs []string - for _, f := range pkg.Files { - for _, decl := range f.Decls { - fd, ok := decl.(*ast.FuncDecl) - if !ok { - continue - } - if fd.Name.Name != "Value" { - continue - } - results := fd.Type.Results.List - if len(results) != 2 { - continue - } - valResult := fd.Type.Results.List[0].Type.(*ast.SelectorExpr).X.(*ast.Ident) - diagsResult := fd.Type.Results.List[1].Type.(*ast.SelectorExpr).X.(*ast.Ident) - - if valResult.Name != "cty" && diagsResult.Name != "hcl" { - continue - } - - // If we have a method called Value and it returns something in - // "cty" followed by something in "hcl" then that's specific enough - // for now, even though this is not 100% exact as a correct - // implementation of Value. - - recvTy := fd.Recv.List[0].Type - - switch rtt := recvTy.(type) { - case *ast.StarExpr: - name := rtt.X.(*ast.Ident).Name - recvs = append(recvs, fmt.Sprintf("*%s", name)) - default: - fmt.Fprintf(os.Stderr, "don't know what to do with a %T receiver\n", recvTy) - } - - } - } - - sort.Strings(recvs) - - of, err := os.OpenFile("expression_vars.go", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm) - if err != nil { - fmt.Fprintf(os.Stderr, "failed to open output file: %s\n", err) - os.Exit(1) - } - - fmt.Fprint(of, outputPreamble) - for _, recv := range recvs { - fmt.Fprintf(of, outputMethodFmt, recv) - } - fmt.Fprint(of, "\n") - -} - -const outputPreamble = `package hclsyntax - -// Generated by expression_vars_get.go. DO NOT EDIT. -// Run 'go generate' on this package to update the set of functions here. 
- -import ( - "github.com/hashicorp/hcl/v2" -)` - -const outputMethodFmt = ` - -func (e %s) Variables() []hcl.Traversal { - return Variables(e) -}` diff --git a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go index 6fb284a8f7..f67d989e54 100644 --- a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go +++ b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go @@ -760,7 +760,7 @@ Traversal: Each: travExpr, Item: itemExpr, - SrcRange: hcl.RangeBetween(dot.Range, lastRange), + SrcRange: hcl.RangeBetween(from.Range(), lastRange), MarkerRange: hcl.RangeBetween(dot.Range, marker.Range), } @@ -819,7 +819,7 @@ Traversal: Each: travExpr, Item: itemExpr, - SrcRange: hcl.RangeBetween(open.Range, travExpr.Range()), + SrcRange: hcl.RangeBetween(from.Range(), travExpr.Range()), MarkerRange: hcl.RangeBetween(open.Range, close.Range), } @@ -867,8 +867,9 @@ Traversal: Collection: ret, Key: keyExpr, - SrcRange: rng, - OpenRange: open.Range, + SrcRange: hcl.RangeBetween(from.Range(), rng), + OpenRange: open.Range, + BracketRange: rng, } } } @@ -899,7 +900,7 @@ func makeRelativeTraversal(expr Expression, next hcl.Traverser, rng hcl.Range) E return &RelativeTraversalExpr{ Source: expr, Traversal: hcl.Traversal{next}, - SrcRange: rng, + SrcRange: hcl.RangeBetween(expr.Range(), rng), } } } diff --git a/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_body.go b/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_body.go index c16d13e3a4..119f53e621 100644 --- a/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_body.go +++ b/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_body.go @@ -60,7 +60,7 @@ func (b *Body) Attributes() map[string]*Attribute { // Blocks returns a new slice of all the blocks in the body. func (b *Body) Blocks() []*Block { ret := make([]*Block, 0, len(b.items)) - for n := range b.items { + for _, n := range b.items.List() { if block, isBlock := n.content.(*Block); isBlock { ret = append(ret, block) } @@ -134,6 +134,26 @@ func (b *Body) RemoveBlock(block *Block) bool { return false } +// SetAttributeRaw either replaces the expression of an existing attribute +// of the given name or adds a new attribute definition to the end of the block, +// using the given tokens verbatim as the expression. +// +// The same caveats apply to this function as for NewExpressionRaw on which +// it is based. If possible, prefer to use SetAttributeValue or +// SetAttributeTraversal. +func (b *Body) SetAttributeRaw(name string, tokens Tokens) *Attribute { + attr := b.GetAttribute(name) + expr := NewExpressionRaw(tokens) + if attr != nil { + attr.expr = attr.expr.ReplaceWith(expr) + } else { + attr := newAttribute() + attr.init(name, expr) + b.appendItem(attr) + } + return attr +} + // SetAttributeValue either replaces the expression of an existing attribute // of the given name or adds a new attribute definition to the end of the block. // diff --git a/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_expression.go b/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_expression.go index 854e71690b..073c30871e 100644 --- a/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_expression.go +++ b/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_expression.go @@ -21,6 +21,29 @@ func newExpression() *Expression { } } +// NewExpressionRaw constructs an expression containing the given raw tokens. +// +// There is no automatic validation that the given tokens produce a valid +// expression. Callers of thus function must take care to produce invalid +// expression tokens. 
Where possible, use the higher-level functions +// NewExpressionLiteral or NewExpressionAbsTraversal instead. +// +// Because NewExpressionRaw does not interpret the given tokens in any way, +// an expression created by NewExpressionRaw will produce an empty result +// for calls to its method Variables, even if the given token sequence +// contains a subslice that would normally be interpreted as a traversal under +// parsing. +func NewExpressionRaw(tokens Tokens) *Expression { + expr := newExpression() + // We copy the tokens here in order to make sure that later mutations + // by the caller don't inadvertently cause our expression to become + // invalid. + copyTokens := make(Tokens, len(tokens)) + copy(copyTokens, tokens) + expr.children.AppendUnstructuredTokens(copyTokens) + return expr +} + // NewExpressionLiteral constructs an an expression that represents the given // literal value. // diff --git a/vendor/github.com/hashicorp/hcl/v2/hclwrite/generate.go b/vendor/github.com/hashicorp/hcl/v2/hclwrite/generate.go index 289a30d684..4d439acd7a 100644 --- a/vendor/github.com/hashicorp/hcl/v2/hclwrite/generate.go +++ b/vendor/github.com/hashicorp/hcl/v2/hclwrite/generate.go @@ -159,12 +159,12 @@ func appendTokensForValue(val cty.Value, toks Tokens) Tokens { func appendTokensForTraversal(traversal hcl.Traversal, toks Tokens) Tokens { for _, step := range traversal { - appendTokensForTraversalStep(step, toks) + toks = appendTokensForTraversalStep(step, toks) } return toks } -func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) { +func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) Tokens { switch ts := step.(type) { case hcl.TraverseRoot: toks = append(toks, &Token{ @@ -188,7 +188,7 @@ func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) { Type: hclsyntax.TokenOBrack, Bytes: []byte{'['}, }) - appendTokensForValue(ts.Key, toks) + toks = appendTokensForValue(ts.Key, toks) toks = append(toks, &Token{ Type: hclsyntax.TokenCBrack, Bytes: []byte{']'}, @@ -196,6 +196,8 @@ func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) { default: panic(fmt.Sprintf("unsupported traversal step type %T", step)) } + + return toks } func escapeQuotedStringLit(s string) []byte { diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load.go index faa93ed6fb..a070f76e04 100644 --- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load.go +++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load.go @@ -52,12 +52,12 @@ func (m *Module) init(diags Diagnostics) { // case so callers can easily recognize it. 
for _, r := range m.ManagedResources { if _, exists := m.RequiredProviders[r.Provider.Name]; !exists { - m.RequiredProviders[r.Provider.Name] = []string{} + m.RequiredProviders[r.Provider.Name] = &ProviderRequirement{} } } for _, r := range m.DataResources { if _, exists := m.RequiredProviders[r.Provider.Name]; !exists { - m.RequiredProviders[r.Provider.Name] = []string{} + m.RequiredProviders[r.Provider.Name] = &ProviderRequirement{} } } diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_hcl.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_hcl.go index 9cb3aeef1b..f83ac87267 100644 --- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_hcl.go +++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_hcl.go @@ -7,8 +7,8 @@ import ( "github.com/hashicorp/hcl/v2/hclsyntax" - "github.com/hashicorp/hcl/v2/gohcl" "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/gohcl" "github.com/hashicorp/hcl/v2/hclparse" ctyjson "github.com/zclconf/go-cty/cty/json" ) @@ -51,18 +51,17 @@ func loadModule(dir string) (*Module, Diagnostics) { } } - for _, block := range content.Blocks { - // Our schema only allows required_providers here, so we - // assume that we'll only get that block type. - attrs, attrDiags := block.Body.JustAttributes() - diags = append(diags, attrDiags...) - - for name, attr := range attrs { - var version string - valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version) - diags = append(diags, valDiags...) - if !valDiags.HasErrors() { - mod.RequiredProviders[name] = append(mod.RequiredProviders[name], version) + for _, innerBlock := range content.Blocks { + switch innerBlock.Type { + case "required_providers": + reqs, reqsDiags := decodeRequiredProvidersBlock(innerBlock) + diags = append(diags, reqsDiags...) + for name, req := range reqs { + if _, exists := mod.RequiredProviders[name]; !exists { + mod.RequiredProviders[name] = req + } else { + mod.RequiredProviders[name].VersionConstraints = append(mod.RequiredProviders[name].VersionConstraints, req.VersionConstraints...) + } } } } @@ -178,22 +177,20 @@ func loadModule(dir string) (*Module, Diagnostics) { diags = append(diags, contentDiags...) name := block.Labels[0] - + // Even if there isn't an explicit version required, we still + // need an entry in our map to signal the unversioned dependency. + if _, exists := mod.RequiredProviders[name]; !exists { + mod.RequiredProviders[name] = &ProviderRequirement{} + } if attr, defined := content.Attributes["version"]; defined { var version string valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version) diags = append(diags, valDiags...) if !valDiags.HasErrors() { - mod.RequiredProviders[name] = append(mod.RequiredProviders[name], version) + mod.RequiredProviders[name].VersionConstraints = append(mod.RequiredProviders[name].VersionConstraints, version) } } - // Even if there wasn't an explicit version required, we still - // need an entry in our map to signal the unversioned dependency. 
- if _, exists := mod.RequiredProviders[name]; !exists { - mod.RequiredProviders[name] = []string{} - } - case "resource", "data": content, _, contentDiags := block.Body.PartialContent(resourceSchema) diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_legacy.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_legacy.go index 86ffdf11dd..c79b033b6b 100644 --- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_legacy.go +++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_legacy.go @@ -267,17 +267,15 @@ func loadModuleLegacyHCL(dir string) (*Module, Diagnostics) { if err != nil { return nil, diagnosticsErrorf("invalid provider block at %s: %s", item.Pos(), err) } - - if block.Version != "" { - mod.RequiredProviders[name] = append(mod.RequiredProviders[name], block.Version) - } - // Even if there wasn't an explicit version required, we still // need an entry in our map to signal the unversioned dependency. if _, exists := mod.RequiredProviders[name]; !exists { - mod.RequiredProviders[name] = []string{} + mod.RequiredProviders[name] = &ProviderRequirement{} } + if block.Version != "" { + mod.RequiredProviders[name].VersionConstraints = append(mod.RequiredProviders[name].VersionConstraints, block.Version) + } } } } diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/module.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/module.go index 65ddb23073..63027d1841 100644 --- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/module.go +++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/module.go @@ -9,8 +9,8 @@ type Module struct { Variables map[string]*Variable `json:"variables"` Outputs map[string]*Output `json:"outputs"` - RequiredCore []string `json:"required_core,omitempty"` - RequiredProviders map[string][]string `json:"required_providers"` + RequiredCore []string `json:"required_core,omitempty"` + RequiredProviders map[string]*ProviderRequirement `json:"required_providers"` ManagedResources map[string]*Resource `json:"managed_resources"` DataResources map[string]*Resource `json:"data_resources"` @@ -27,7 +27,7 @@ func newModule(path string) *Module { Path: path, Variables: make(map[string]*Variable), Outputs: make(map[string]*Output), - RequiredProviders: make(map[string][]string), + RequiredProviders: make(map[string]*ProviderRequirement), ManagedResources: make(map[string]*Resource), DataResources: make(map[string]*Resource), ModuleCalls: make(map[string]*ModuleCall), diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/provider_ref.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/provider_ref.go index d924837785..157c8c2c15 100644 --- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/provider_ref.go +++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/provider_ref.go @@ -1,5 +1,11 @@ package tfconfig +import ( + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/gohcl" + "github.com/zclconf/go-cty/cty/gocty" +) + // ProviderRef is a reference to a provider configuration within a module. // It represents the contents of a "provider" argument in a resource, or // a value in the "providers" map for a module call. 
@@ -7,3 +13,73 @@ type ProviderRef struct { Name string `json:"name"` Alias string `json:"alias,omitempty"` // Empty if the default provider configuration is referenced } + +type ProviderRequirement struct { + Source string `json:"source,omitempty"` + VersionConstraints []string `json:"version_constraints,omitempty"` +} + +func decodeRequiredProvidersBlock(block *hcl.Block) (map[string]*ProviderRequirement, hcl.Diagnostics) { + attrs, diags := block.Body.JustAttributes() + reqs := make(map[string]*ProviderRequirement) + for name, attr := range attrs { + expr, err := attr.Expr.Value(nil) + if err != nil { + diags = append(diags, err...) + } + + switch { + case expr.Type().IsPrimitiveType(): + var version string + valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version) + diags = append(diags, valDiags...) + if !valDiags.HasErrors() { + reqs[name] = &ProviderRequirement{ + VersionConstraints: []string{version}, + } + } + + case expr.Type().IsObjectType(): + var pr ProviderRequirement + if expr.Type().HasAttribute("version") { + var version string + err := gocty.FromCtyValue(expr.GetAttr("version"), &version) + if err == nil { + pr.VersionConstraints = append(pr.VersionConstraints, version) + } else { + diags = append(diags, &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Unsuitable value type", + Detail: "Unsuitable value: string required", + Subject: attr.Expr.Range().Ptr(), + }) + } + } + if expr.Type().HasAttribute("source") { + var source string + err := gocty.FromCtyValue(expr.GetAttr("source"), &source) + if err == nil { + pr.Source = source + } else { + diags = append(diags, &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Unsuitable value type", + Detail: "Unsuitable value: string required", + Subject: attr.Expr.Range().Ptr(), + }) + } + } + reqs[name] = &pr + + default: + diags = append(diags, &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Unsuitable value type", + Detail: "Unsuitable value: string required", + Subject: attr.Expr.Range().Ptr(), + }) + } + } + + return reqs, diags +} diff --git a/vendor/github.com/hashicorp/terraform/LICENSE b/vendor/github.com/hashicorp/terraform/LICENSE new file mode 100644 index 0000000000..c33dcc7c92 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform/LICENSE @@ -0,0 +1,354 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. “Contributor” + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. “Contributor Version” + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor’s Contribution. + +1.3. “Contribution” + + means Covered Software of a particular Contributor. + +1.4. “Covered Software” + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. “Incompatible With Secondary Licenses” + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of version + 1.1 or earlier of the License, but not also under the terms of a + Secondary License. + +1.6. “Executable Form” + + means any form of the work other than Source Code Form. + +1.7. 
“Larger Work” + + means a work that combines Covered Software with other material, in a separate + file or files, that is not Covered Software. + +1.8. “License” + + means this document. + +1.9. “Licensable” + + means having the right to grant, to the maximum extent possible, whether at the + time of the initial grant or subsequently, any and all of the rights conveyed by + this License. + +1.10. “Modifications” + + means any of the following: + + a. any file in Source Code Form that results from an addition to, deletion + from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. “Patent Claims” of a Contributor + + means any patent claim(s), including without limitation, method, process, + and apparatus claims, in any patent Licensable by such Contributor that + would be infringed, but for the grant of the License, by the making, + using, selling, offering for sale, having made, import, or transfer of + either its Contributions or its Contributor Version. + +1.12. “Secondary License” + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. “Source Code Form” + + means the form of the work preferred for making modifications. + +1.14. “You” (or “Your”) + + means an individual or a legal entity exercising rights under this + License. For legal entities, “You” includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, “control” means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or as + part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its Contributions + or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution become + effective for each Contribution on the date the Contributor first distributes + such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under this + License. No additional rights or licenses will be implied from the distribution + or licensing of Covered Software under this License. Notwithstanding Section + 2.1(b) above, no patent license is granted by a Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party’s + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of its + Contributions. 
+ + This License does not grant any rights in the trademarks, service marks, or + logos of any Contributor (except as may be necessary to comply with the + notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this License + (see Section 10.2) or under the terms of a Secondary License (if permitted + under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its Contributions + are its original creation(s) or it has sufficient rights to grant the + rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under applicable + copyright doctrines of fair use, fair dealing, or other equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under the + terms of this License. You must inform recipients that the Source Code Form + of the Covered Software is governed by the terms of this License, and how + they can obtain a copy of this License. You may not attempt to alter or + restrict the recipients’ rights in the Source Code Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this License, + or sublicense it under different terms, provided that the license for + the Executable Form does not attempt to limit or alter the recipients’ + rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for the + Covered Software. If the Larger Work is a combination of Covered Software + with a work governed by one or more Secondary Licenses, and the Covered + Software is not Incompatible With Secondary Licenses, this License permits + You to additionally distribute such Covered Software under the terms of + such Secondary License(s), so that the recipient of the Larger Work may, at + their option, further distribute the Covered Software under the terms of + either this License or such Secondary License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices (including + copyright notices, patent notices, disclaimers of warranty, or limitations + of liability) contained within the Source Code Form of the Covered + Software, except that You may alter any license notices to the extent + required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. 
However, You may do so only on Your own behalf, and not on behalf + of any Contributor. You must make it absolutely clear that any such + warranty, support, indemnity, or liability obligation is offered by You + alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, judicial + order, or regulation then You must: (a) comply with the terms of this License + to the maximum extent possible; and (b) describe the limitations and the code + they affect. Such description must be placed in a text file included with all + distributions of the Covered Software under this License. Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing basis, + if such Contributor fails to notify You of the non-compliance by some + reasonable means prior to 60 days after You have come back into compliance. + Moreover, Your grants from a particular Contributor are reinstated on an + ongoing basis if such Contributor notifies You of the non-compliance by + some reasonable means, this is the first time You have received notice of + non-compliance with this License from such Contributor, and You become + compliant prior to 30 days after Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, counter-claims, + and cross-claims) alleging that a Contributor Version directly or + indirectly infringes any patent, then the rights granted to You by any and + all Contributors for the Covered Software under Section 2.1 of this License + shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an “as is” basis, without + warranty of any kind, either expressed, implied, or statutory, including, + without limitation, warranties that the Covered Software is free of defects, + merchantable, fit for a particular purpose or non-infringing. The entire + risk as to the quality and performance of the Covered Software is with You. + Should any Covered Software prove defective in any respect, You (not any + Contributor) assume the cost of any necessary servicing, repair, or + correction. This disclaimer of warranty constitutes an essential part of this + License. No use of any Covered Software is authorized under this License + except under this disclaimer. + +7. 
Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from such + party’s negligence to the extent applicable law prohibits such limitation. + Some jurisdictions do not allow the exclusion or limitation of incidental or + consequential damages, so this exclusion and limitation may not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts of + a jurisdiction where the defendant maintains its principal place of business + and such litigation shall be governed by laws of that jurisdiction, without + reference to its conflict-of-law provisions. Nothing in this Section shall + prevent a party’s ability to bring cross-claims or counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject matter + hereof. If any provision of this License is held to be unenforceable, such + provision shall be reformed only to the extent necessary to make it + enforceable. Any law or regulation which provides that the language of a + contract shall be construed against the drafter shall not be used to construe + this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version of + the License under which You originally received the Covered Software, or + under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a modified + version of this License if you rename the license and remove any + references to the name of the license steward (except to note that such + modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the + notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, then +You may include the notice in a location (such as a LICENSE file in a relevant +directory) where a recipient would be likely to look for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - “Incompatible With Secondary Licenses” Notice + + This Source Code Form is “Incompatible + With Secondary Licenses”, as defined by + the Mozilla Public License, v. 2.0. + diff --git a/vendor/github.com/hashicorp/terraform/helper/logging/indent.go b/vendor/github.com/hashicorp/terraform/helper/logging/indent.go new file mode 100644 index 0000000000..e0da0d7c73 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform/helper/logging/indent.go @@ -0,0 +1,23 @@ +package logging + +import ( + "strings" +) + +// Indent adds two spaces to the beginning of each line of the given string, +// with the goal of making the log level filter understand it as a line +// continuation rather than possibly as new log lines. +func Indent(s string) string { + var b strings.Builder + for len(s) > 0 { + end := strings.IndexByte(s, '\n') + if end == -1 { + end = len(s) - 1 + } + var l string + l, s = s[:end+1], s[end+1:] + b.WriteString(" ") + b.WriteString(l) + } + return b.String() +} diff --git a/vendor/github.com/hashicorp/terraform/helper/logging/level.go b/vendor/github.com/hashicorp/terraform/helper/logging/level.go new file mode 100644 index 0000000000..0dc4dfe8d5 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform/helper/logging/level.go @@ -0,0 +1,159 @@ +package logging + +import ( + "bytes" + "io" + "sync" +) + +// LogLevel is a special string, conventionally written all in uppercase, that +// can be used to mark a log line for filtering and to specify filtering +// levels in the LevelFilter type. +type LogLevel string + +// LevelFilter is an io.Writer that can be used with a logger that +// will attempt to filter out log messages that aren't at least a certain +// level. +// +// This filtering is HEURISTIC-BASED, and so will not be 100% reliable. The +// assumptions it makes are: +// +// - Individual log messages are never split across multiple calls to the +// Write method. +// +// - Messages that carry levels are marked by a sequence starting with "[", +// then the level name string, and then "]". Any message without a sequence +// like this is an un-levelled message, and is not subject to filtering. +// +// - Each \n-delimited line in a write is a separate log message, unless a +// line starts with at least one space in which case it is interpreted +// as a continuation of the previous line. +// +// - If a log line starts with a non-whitespace character that isn't a digit +// then it's recognized as a degenerate continuation, because "real" log +// lines should start with a date/time and thus always have a leading +// digit. (This also cleans up after some situations where the assumptuion +// that messages arrive atomically aren't met, which is sadly sometimes +// true for longer messages that trip over some buffering behavior in +// panicwrap.) +// +// Because logging is a cross-cutting concern and not fully under the control +// of Terraform itself, there will certainly be cases where the above +// heuristics will fail. For example, it is likely that LevelFilter will +// occasionally misinterpret a continuation line as a new message because the +// code generating it doesn't know about our indentation convention. +// +// Our goal here is just to make a best effort to reduce the log volume, +// accepting that the results will not be 100% correct. +// +// Logging calls within Terraform Core should follow the above conventions so +// that the log output is broadly correct, however. 
+// +// Once the filter is in use somewhere, it is not safe to modify +// the structure. +type LevelFilter struct { + // Levels is the list of log levels, in increasing order of + // severity. Example might be: {"DEBUG", "WARN", "ERROR"}. + Levels []LogLevel + + // MinLevel is the minimum level allowed through + MinLevel LogLevel + + // The underlying io.Writer where log messages that pass the filter + // will be set. + Writer io.Writer + + badLevels map[LogLevel]struct{} + show bool + once sync.Once +} + +// Check will check a given line if it would be included in the level +// filter. +func (f *LevelFilter) Check(line []byte) bool { + f.once.Do(f.init) + + // Check for a log level + var level LogLevel + x := bytes.IndexByte(line, '[') + if x >= 0 { + y := bytes.IndexByte(line[x:], ']') + if y >= 0 { + level = LogLevel(line[x+1 : x+y]) + } + } + + //return level == "" + + _, ok := f.badLevels[level] + return !ok +} + +// Write is a specialized implementation of io.Writer suitable for being +// the output of a logger from the "log" package. +// +// This Writer implementation assumes that it will only recieve byte slices +// containing one or more entire lines of log output, each one terminated by +// a newline. This is compatible with the behavior of the "log" package +// directly, and is also tolerant of intermediaries that might buffer multiple +// separate writes together, as long as no individual log line is ever +// split into multiple slices. +// +// Behavior is undefined if any log line is split across multiple writes or +// written without a trailing '\n' delimiter. +func (f *LevelFilter) Write(p []byte) (n int, err error) { + for len(p) > 0 { + // Split at the first \n, inclusive + idx := bytes.IndexByte(p, '\n') + if idx == -1 { + // Invalid, undelimited write. We'll tolerate it assuming that + // our assumptions are being violated, but the results may be + // non-ideal. + idx = len(p) - 1 + break + } + var l []byte + l, p = p[:idx+1], p[idx+1:] + // Lines starting with characters other than decimal digits (including + // whitespace) are assumed to be continuations lines. This is an + // imprecise heuristic, but experimentally it seems to generate + // "good enough" results from Terraform Core's own logging. Its mileage + // may vary with output from other systems. + if l[0] >= '0' && l[0] <= '9' { + f.show = f.Check(l) + } + if f.show { + _, err = f.Writer.Write(l) + if err != nil { + // Technically it's not correct to say we've written the whole + // buffer, but for our purposes here it's good enough as we're + // only implementing io.Writer enough to satisfy logging + // use-cases. + return len(p), err + } + } + } + + // We always behave as if we wrote the whole of the buffer, even if + // we actually skipped some lines. We're only implementiong io.Writer + // enough to satisfy logging use-cases. 
+ return len(p), nil +} + +// SetMinLevel is used to update the minimum log level +func (f *LevelFilter) SetMinLevel(min LogLevel) { + f.MinLevel = min + f.init() +} + +func (f *LevelFilter) init() { + badLevels := make(map[LogLevel]struct{}) + for _, level := range f.Levels { + if level == f.MinLevel { + break + } + badLevels[level] = struct{}{} + } + f.badLevels = badLevels + f.show = true +} diff --git a/vendor/github.com/hashicorp/terraform/helper/logging/logging.go b/vendor/github.com/hashicorp/terraform/helper/logging/logging.go new file mode 100644 index 0000000000..75627cf02e --- /dev/null +++ b/vendor/github.com/hashicorp/terraform/helper/logging/logging.go @@ -0,0 +1,109 @@ +package logging + +import ( + "io" + "io/ioutil" + "log" + "os" + "strings" + "syscall" +) + +// These are the environmental variables that determine if we log, and if +// we log whether or not the log should go to a file. +const ( + EnvLog = "TF_LOG" // Set to True + EnvLogFile = "TF_LOG_PATH" // Set to a file +) + +// ValidLevels are the log level names that Terraform recognizes. +var ValidLevels = []LogLevel{"TRACE", "DEBUG", "INFO", "WARN", "ERROR"} + +// LogOutput determines where we should send logs (if anywhere) and the log level. +func LogOutput() (logOutput io.Writer, err error) { + logOutput = ioutil.Discard + + logLevel := CurrentLogLevel() + if logLevel == "" { + return + } + + logOutput = os.Stderr + if logPath := os.Getenv(EnvLogFile); logPath != "" { + var err error + logOutput, err = os.OpenFile(logPath, syscall.O_CREAT|syscall.O_RDWR|syscall.O_APPEND, 0666) + if err != nil { + return nil, err + } + } + + if logLevel == "TRACE" { + // Just pass through logs directly then, without any level filtering at all. + return logOutput, nil + } + + // Otherwise we'll use our level filter, which is a heuristic-based + // best effort thing that is not totally reliable but helps to reduce + // the volume of logs in some cases. + logOutput = &LevelFilter{ + Levels: ValidLevels, + MinLevel: LogLevel(logLevel), + Writer: logOutput, + } + + return logOutput, nil +} + +// SetOutput checks for a log destination with LogOutput, and calls +// log.SetOutput with the result. If LogOutput returns nil, SetOutput uses +// ioutil.Discard. Any error from LogOutout is fatal. +func SetOutput() { + out, err := LogOutput() + if err != nil { + log.Fatal(err) + } + + if out == nil { + out = ioutil.Discard + } + + log.SetOutput(out) +} + +// CurrentLogLevel returns the current log level string based the environment vars +func CurrentLogLevel() string { + envLevel := os.Getenv(EnvLog) + if envLevel == "" { + return "" + } + + logLevel := "TRACE" + if isValidLogLevel(envLevel) { + // allow following for better ux: info, Info or INFO + logLevel = strings.ToUpper(envLevel) + } else { + log.Printf("[WARN] Invalid log level: %q. Defaulting to level: TRACE. 
Valid levels are: %+v", + envLevel, ValidLevels) + } + if logLevel != "TRACE" { + log.Printf("[WARN] Log levels other than TRACE are currently unreliable, and are supported only for backward compatibility.\n Use TF_LOG=TRACE to see Terraform's internal logs.\n ----") + } + + return logLevel +} + +// IsDebugOrHigher returns whether or not the current log level is debug or trace +func IsDebugOrHigher() bool { + level := string(CurrentLogLevel()) + return level == "DEBUG" || level == "TRACE" +} + +func isValidLogLevel(level string) bool { + for _, l := range ValidLevels { + if strings.ToUpper(level) == string(l) { + return true + } + } + + return false +} diff --git a/vendor/github.com/hashicorp/terraform/helper/logging/transport.go b/vendor/github.com/hashicorp/terraform/helper/logging/transport.go new file mode 100644 index 0000000000..bddabe647a --- /dev/null +++ b/vendor/github.com/hashicorp/terraform/helper/logging/transport.go @@ -0,0 +1,70 @@ +package logging + +import ( + "bytes" + "encoding/json" + "log" + "net/http" + "net/http/httputil" + "strings" +) + +type transport struct { + name string + transport http.RoundTripper +} + +func (t *transport) RoundTrip(req *http.Request) (*http.Response, error) { + if IsDebugOrHigher() { + reqData, err := httputil.DumpRequestOut(req, true) + if err == nil { + log.Printf("[DEBUG] "+logReqMsg, t.name, prettyPrintJsonLines(reqData)) + } else { + log.Printf("[ERROR] %s API Request error: %#v", t.name, err) + } + } + + resp, err := t.transport.RoundTrip(req) + if err != nil { + return resp, err + } + + if IsDebugOrHigher() { + respData, err := httputil.DumpResponse(resp, true) + if err == nil { + log.Printf("[DEBUG] "+logRespMsg, t.name, prettyPrintJsonLines(respData)) + } else { + log.Printf("[ERROR] %s API Response error: %#v", t.name, err) + } + } + + return resp, nil +} + +func NewTransport(name string, t http.RoundTripper) *transport { + return &transport{name, t} +} + +// prettyPrintJsonLines iterates through a []byte line-by-line, +// transforming any lines that are complete json into pretty-printed json. +func prettyPrintJsonLines(b []byte) string { + parts := strings.Split(string(b), "\n") + for i, p := range parts { + if b := []byte(p); json.Valid(b) { + var out bytes.Buffer + json.Indent(&out, b, "", " ") + parts[i] = out.String() + } + } + return strings.Join(parts, "\n") +} + +const logReqMsg = `%s API Request Details: +---[ REQUEST ]--------------------------------------- +%s +-----------------------------------------------------` + +const logRespMsg = `%s API Response Details: +---[ RESPONSE ]-------------------------------------- +%s +-----------------------------------------------------` diff --git a/vendor/github.com/shurcooL/githubv4/.travis.yml b/vendor/github.com/shurcooL/githubv4/.travis.yml new file mode 100644 index 0000000000..437c57db3d --- /dev/null +++ b/vendor/github.com/shurcooL/githubv4/.travis.yml @@ -0,0 +1,16 @@ +sudo: false +language: go +go: + - 1.x + - master +matrix: + allow_failures: + - go: master + fast_finish: true +install: + - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d -s .) + - go vet ./... + - go test -v -race ./... 
diff --git a/vendor/github.com/shurcooL/githubv4/LICENSE b/vendor/github.com/shurcooL/githubv4/LICENSE new file mode 100644 index 0000000000..ca4c77642d --- /dev/null +++ b/vendor/github.com/shurcooL/githubv4/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Dmitri Shuralyov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/shurcooL/githubv4/README.md b/vendor/github.com/shurcooL/githubv4/README.md new file mode 100644 index 0000000000..319a6428ab --- /dev/null +++ b/vendor/github.com/shurcooL/githubv4/README.md @@ -0,0 +1,408 @@ +githubv4 +======== + +[![Build Status](https://travis-ci.org/shurcooL/githubv4.svg?branch=master)](https://travis-ci.org/shurcooL/githubv4) [![GoDoc](https://godoc.org/github.com/shurcooL/githubv4?status.svg)](https://godoc.org/github.com/shurcooL/githubv4) + +Package `githubv4` is a client library for accessing GitHub GraphQL API v4 (https://developer.github.com/v4/). + +If you're looking for a client library for GitHub REST API v3, the recommended package is [`github.com/google/go-github/github`](https://godoc.org/github.com/google/go-github/github). + +**Status:** In research and development. The API will change when opportunities for improvement are discovered; it is not yet frozen. + +Focus +----- + +- Friendly, simple and powerful API. +- Correctness, high performance and efficiency. +- Support all of GitHub GraphQL API v4 via code generation from schema. + +Installation +------------ + +`githubv4` requires Go version 1.8 or later. + +```bash +go get -u github.com/shurcooL/githubv4 +``` + +Usage +----- + +### Authentication + +GitHub GraphQL API v4 [requires authentication](https://developer.github.com/v4/guides/forming-calls/#authenticating-with-graphql). The `githubv4` package does not directly handle authentication. Instead, when creating a new client, you're expected to pass an `http.Client` that performs authentication. The easiest and recommended way to do this is to use the [`golang.org/x/oauth2`](https://golang.org/x/oauth2) package. You'll need an OAuth token from GitHub (for example, a [personal API token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/)) with the right scopes. Then: + +```Go +import "golang.org/x/oauth2" + +func main() { + src := oauth2.StaticTokenSource( + &oauth2.Token{AccessToken: os.Getenv("GITHUB_TOKEN")}, + ) + httpClient := oauth2.NewClient(context.Background(), src) + + client := githubv4.NewClient(httpClient) + // Use client... 
+} +``` + +### Simple Query + +To make a query, you need to define a Go type that corresponds to the GitHub GraphQL schema, and contains the fields you're interested in querying. You can look up the GitHub GraphQL schema at https://developer.github.com/v4/query/. + +For example, to make the following GraphQL query: + +```GraphQL +query { + viewer { + login + createdAt + } +} +``` + +You can define this variable: + +```Go +var query struct { + Viewer struct { + Login githubv4.String + CreatedAt githubv4.DateTime + } +} +``` + +Then call `client.Query`, passing a pointer to it: + +```Go +err := client.Query(context.Background(), &query, nil) +if err != nil { + // Handle error. +} +fmt.Println(" Login:", query.Viewer.Login) +fmt.Println("CreatedAt:", query.Viewer.CreatedAt) + +// Output: +// Login: gopher +// CreatedAt: 2017-05-26 21:17:14 +0000 UTC +``` + +### Scalar Types + +For each scalar in the GitHub GraphQL schema listed at https://developer.github.com/v4/scalar/, there is a corresponding Go type in package `githubv4`. + +You can use these types when writing queries: + +```Go +var query struct { + Viewer struct { + Login githubv4.String + CreatedAt githubv4.DateTime + IsBountyHunter githubv4.Boolean + BioHTML githubv4.HTML + WebsiteURL githubv4.URI + } +} +// Call client.Query() and use results in query... +``` + +However, depending on how you're planning to use the results of your query, it's often more convenient to use other Go types. + +The `encoding/json` rules are used for converting individual JSON-encoded fields from a GraphQL response into Go values. See https://godoc.org/encoding/json#Unmarshal for details. The [`json.Unmarshaler`](https://godoc.org/encoding/json#Unmarshaler) interface is respected. + +That means you can simplify the earlier query by using predeclared Go types: + +```Go +// import "time" + +var query struct { + Viewer struct { + Login string // E.g., "gopher". + CreatedAt time.Time // E.g., time.Date(2017, 5, 26, 21, 17, 14, 0, time.UTC). + IsBountyHunter bool // E.g., true. + BioHTML string // E.g., `I am learning GraphQL!`. + WebsiteURL string // E.g., "https://golang.org". + } +} +// Call client.Query() and use results in query... +``` + +The [`DateTime`](https://developer.github.com/v4/scalar/datetime/) scalar is described as "an ISO-8601 encoded UTC date string". If you wanted to fetch in that form without parsing it into a `time.Time`, you can use the `string` type. For example, this would work: + +```Go +// import "html/template" + +type MyBoolean bool + +var query struct { + Viewer struct { + Login string // E.g., "gopher". + CreatedAt string // E.g., "2017-05-26T21:17:14Z". + IsBountyHunter MyBoolean // E.g., MyBoolean(true). + BioHTML template.HTML // E.g., template.HTML(`I am learning GraphQL!`). + WebsiteURL template.URL // E.g., template.URL("https://golang.org"). + } +} +// Call client.Query() and use results in query... +``` + +### Arguments and Variables + +Often, you'll want to specify arguments on some fields. You can use the `graphql` struct field tag for this. + +For example, to make the following GraphQL query: + +```GraphQL +{ + repository(owner: "octocat", name: "Hello-World") { + description + } +} +``` + +You can define this variable: + +```Go +var q struct { + Repository struct { + Description string + } `graphql:"repository(owner: \"octocat\", name: \"Hello-World\")"` +} +``` + +Then call `client.Query`: + +```Go +err := client.Query(context.Background(), &q, nil) +if err != nil { + // Handle error. 
+} +fmt.Println(q.Repository.Description) + +// Output: +// My first repository on GitHub! +``` + +However, that'll only work if the arguments are constant and known in advance. Otherwise, you will need to make use of variables. Replace the constants in the struct field tag with variable names: + +```Go +// fetchRepoDescription fetches description of repo with owner and name. +func fetchRepoDescription(ctx context.Context, owner, name string) (string, error) { + var q struct { + Repository struct { + Description string + } `graphql:"repository(owner: $owner, name: $name)"` + } +``` + +When sending variables to GraphQL, you need to use exact types that match GraphQL scalar types, otherwise the GraphQL server will return an error. + +So, define a `variables` map with their values that are converted to GraphQL scalar types: + +```Go + variables := map[string]interface{}{ + "owner": githubv4.String(owner), + "name": githubv4.String(name), + } +``` + +Finally, call `client.Query` providing `variables`: + +```Go + err := client.Query(ctx, &q, variables) + return q.Repository.Description, err +} +``` + +### Inline Fragments + +Some GraphQL queries contain inline fragments. You can use the `graphql` struct field tag to express them. + +For example, to make the following GraphQL query: + +```GraphQL +{ + repositoryOwner(login: "github") { + login + ... on Organization { + description + } + ... on User { + bio + } + } +} +``` + +You can define this variable: + +```Go +var q struct { + RepositoryOwner struct { + Login string + Organization struct { + Description string + } `graphql:"... on Organization"` + User struct { + Bio string + } `graphql:"... on User"` + } `graphql:"repositoryOwner(login: \"github\")"` +} +``` + +Alternatively, you can define the struct types corresponding to inline fragments, and use them as embedded fields in your query: + +```Go +type ( + OrganizationFragment struct { + Description string + } + UserFragment struct { + Bio string + } +) + +var q struct { + RepositoryOwner struct { + Login string + OrganizationFragment `graphql:"... on Organization"` + UserFragment `graphql:"... on User"` + } `graphql:"repositoryOwner(login: \"github\")"` +} +``` + +Then call `client.Query`: + +```Go +err := client.Query(context.Background(), &q, nil) +if err != nil { + // Handle error. +} +fmt.Println(q.RepositoryOwner.Login) +fmt.Println(q.RepositoryOwner.Description) +fmt.Println(q.RepositoryOwner.Bio) + +// Output: +// github +// How people build software. +// +``` + +### Pagination + +Imagine you wanted to get a complete list of comments in an issue, and not just the first 10 or so. To do that, you'll need to perform multiple queries and use pagination information. For example: + +```Go +type comment struct { + Body string + Author struct { + Login string + AvatarURL string `graphql:"avatarUrl(size: 72)"` + } + ViewerCanReact bool +} +var q struct { + Repository struct { + Issue struct { + Comments struct { + Nodes []comment + PageInfo struct { + EndCursor githubv4.String + HasNextPage bool + } + } `graphql:"comments(first: 100, after: $commentsCursor)"` // 100 per page. + } `graphql:"issue(number: $issueNumber)"` + } `graphql:"repository(owner: $repositoryOwner, name: $repositoryName)"` +} +variables := map[string]interface{}{ + "repositoryOwner": githubv4.String(owner), + "repositoryName": githubv4.String(name), + "issueNumber": githubv4.Int(issue), + "commentsCursor": (*githubv4.String)(nil), // Null after argument to get first page. +} + +// Get comments from all pages. 
+var allComments []comment +for { + err := s.clQL.Query(ctx, &q, variables) + if err != nil { + return err + } + allComments = append(allComments, q.Repository.Issue.Comments.Nodes...) + if !q.Repository.Issue.Comments.PageInfo.HasNextPage { + break + } + variables["commentsCursor"] = githubv4.NewString(q.Repository.Issue.Comments.PageInfo.EndCursor) +} +``` + +There is more than one way to perform pagination. Consider additional fields inside [`PageInfo`](https://developer.github.com/v4/object/pageinfo/) object. + +### Mutations + +Mutations often require information that you can only find out by performing a query first. Let's suppose you've already done that. + +For example, to make the following GraphQL mutation: + +```GraphQL +mutation($input: AddReactionInput!) { + addReaction(input: $input) { + reaction { + content + } + subject { + id + } + } +} +variables { + "input": { + "subjectId": "MDU6SXNzdWUyMTc5NTQ0OTc=", + "content": "HOORAY" + } +} +``` + +You can define: + +```Go +var m struct { + AddReaction struct { + Reaction struct { + Content githubv4.ReactionContent + } + Subject struct { + ID githubv4.ID + } + } `graphql:"addReaction(input: $input)"` +} +input := githubv4.AddReactionInput{ + SubjectID: targetIssue.ID, // ID of the target issue from a previous query. + Content: githubv4.ReactionContentHooray, +} +``` + +Then call `client.Mutate`: + +```Go +err := client.Mutate(context.Background(), &m, input, nil) +if err != nil { + // Handle error. +} +fmt.Printf("Added a %v reaction to subject with ID %#v!\n", m.AddReaction.Reaction.Content, m.AddReaction.Subject.ID) + +// Output: +// Added a HOORAY reaction to subject with ID "MDU6SXNzdWUyMTc5NTQ0OTc="! +``` + +Directories +----------- + +| Path | Synopsis | +|-------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------| +| [example/githubv4dev](https://godoc.org/github.com/shurcooL/githubv4/example/githubv4dev) | githubv4dev is a test program currently being used for developing githubv4 package. | + +License +------- + +- [MIT License](LICENSE) diff --git a/vendor/github.com/shurcooL/githubv4/doc.go b/vendor/github.com/shurcooL/githubv4/doc.go new file mode 100644 index 0000000000..0ce1a508a8 --- /dev/null +++ b/vendor/github.com/shurcooL/githubv4/doc.go @@ -0,0 +1,13 @@ +// Package githubv4 is a client library for accessing GitHub +// GraphQL API v4 (https://developer.github.com/v4/). +// +// If you're looking for a client library for GitHub REST API v3, +// the recommended package is github.com/google/go-github/github. +// +// Status: In active early research and development. The API will change when +// opportunities for improvement are discovered; it is not yet frozen. +// +// For now, see README for more details. +package githubv4 // import "github.com/shurcooL/githubv4" + +//go:generate go run gen.go diff --git a/vendor/github.com/shurcooL/githubv4/enum.go b/vendor/github.com/shurcooL/githubv4/enum.go new file mode 100644 index 0000000000..46fbeb0da1 --- /dev/null +++ b/vendor/github.com/shurcooL/githubv4/enum.go @@ -0,0 +1,1382 @@ +// Code generated by gen.go; DO NOT EDIT. + +package githubv4 + +// ActionExecutionCapabilitySetting represents the possible capabilities for action executions setting. +type ActionExecutionCapabilitySetting string + +// The possible capabilities for action executions setting. 
+const ( + ActionExecutionCapabilitySettingDisabled ActionExecutionCapabilitySetting = "DISABLED" // All action executions are disabled. + ActionExecutionCapabilitySettingAllActions ActionExecutionCapabilitySetting = "ALL_ACTIONS" // All action executions are enabled. + ActionExecutionCapabilitySettingLocalActionsOnly ActionExecutionCapabilitySetting = "LOCAL_ACTIONS_ONLY" // Only actions defined within the repo are allowed. + ActionExecutionCapabilitySettingNoPolicy ActionExecutionCapabilitySetting = "NO_POLICY" // Organization administrators action execution capabilities. +) + +// AuditLogOrderField represents properties by which Audit Log connections can be ordered. +type AuditLogOrderField string + +// Properties by which Audit Log connections can be ordered. +const ( + AuditLogOrderFieldCreatedAt AuditLogOrderField = "CREATED_AT" // Order audit log entries by timestamp. +) + +// CollaboratorAffiliation represents collaborators affiliation level with a subject. +type CollaboratorAffiliation string + +// Collaborators affiliation level with a subject. +const ( + CollaboratorAffiliationOutside CollaboratorAffiliation = "OUTSIDE" // All outside collaborators of an organization-owned subject. + CollaboratorAffiliationDirect CollaboratorAffiliation = "DIRECT" // All collaborators with permissions to an organization-owned subject, regardless of organization membership status. + CollaboratorAffiliationAll CollaboratorAffiliation = "ALL" // All collaborators the authenticated user can see. +) + +// CommentAuthorAssociation represents a comment author association with repository. +type CommentAuthorAssociation string + +// A comment author association with repository. +const ( + CommentAuthorAssociationMember CommentAuthorAssociation = "MEMBER" // Author is a member of the organization that owns the repository. + CommentAuthorAssociationOwner CommentAuthorAssociation = "OWNER" // Author is the owner of the repository. + CommentAuthorAssociationCollaborator CommentAuthorAssociation = "COLLABORATOR" // Author has been invited to collaborate on the repository. + CommentAuthorAssociationContributor CommentAuthorAssociation = "CONTRIBUTOR" // Author has previously committed to the repository. + CommentAuthorAssociationFirstTimeContributor CommentAuthorAssociation = "FIRST_TIME_CONTRIBUTOR" // Author has not previously committed to the repository. + CommentAuthorAssociationFirstTimer CommentAuthorAssociation = "FIRST_TIMER" // Author has not previously committed to GitHub. + CommentAuthorAssociationNone CommentAuthorAssociation = "NONE" // Author has no association with the repository. +) + +// CommentCannotUpdateReason represents the possible errors that will prevent a user from updating a comment. +type CommentCannotUpdateReason string + +// The possible errors that will prevent a user from updating a comment. +const ( + CommentCannotUpdateReasonArchived CommentCannotUpdateReason = "ARCHIVED" // Unable to create comment because repository is archived. + CommentCannotUpdateReasonInsufficientAccess CommentCannotUpdateReason = "INSUFFICIENT_ACCESS" // You must be the author or have write access to this repository to update this comment. + CommentCannotUpdateReasonLocked CommentCannotUpdateReason = "LOCKED" // Unable to create comment because issue is locked. + CommentCannotUpdateReasonLoginRequired CommentCannotUpdateReason = "LOGIN_REQUIRED" // You must be logged in to update this comment. 
+ CommentCannotUpdateReasonMaintenance CommentCannotUpdateReason = "MAINTENANCE" // Repository is under maintenance. + CommentCannotUpdateReasonVerifiedEmailRequired CommentCannotUpdateReason = "VERIFIED_EMAIL_REQUIRED" // At least one email address must be verified to update this comment. + CommentCannotUpdateReasonDenied CommentCannotUpdateReason = "DENIED" // You cannot update this comment. +) + +// CommitContributionOrderField represents properties by which commit contribution connections can be ordered. +type CommitContributionOrderField string + +// Properties by which commit contribution connections can be ordered. +const ( + CommitContributionOrderFieldOccurredAt CommitContributionOrderField = "OCCURRED_AT" // Order commit contributions by when they were made. + CommitContributionOrderFieldCommitCount CommitContributionOrderField = "COMMIT_COUNT" // Order commit contributions by how many commits they represent. +) + +// ContributionOrderField represents properties by which contribution connections can be ordered. +type ContributionOrderField string + +// Properties by which contribution connections can be ordered. +const ( + ContributionOrderFieldOccurredAt ContributionOrderField = "OCCURRED_AT" // Order contributions by when they were made. +) + +// DefaultRepositoryPermissionField represents the possible default permissions for repositories. +type DefaultRepositoryPermissionField string + +// The possible default permissions for repositories. +const ( + DefaultRepositoryPermissionFieldNone DefaultRepositoryPermissionField = "NONE" // No access. + DefaultRepositoryPermissionFieldRead DefaultRepositoryPermissionField = "READ" // Can read repos by default. + DefaultRepositoryPermissionFieldWrite DefaultRepositoryPermissionField = "WRITE" // Can read and write repos by default. + DefaultRepositoryPermissionFieldAdmin DefaultRepositoryPermissionField = "ADMIN" // Can read, write, and administrate repos by default. +) + +// DeploymentOrderField represents properties by which deployment connections can be ordered. +type DeploymentOrderField string + +// Properties by which deployment connections can be ordered. +const ( + DeploymentOrderFieldCreatedAt DeploymentOrderField = "CREATED_AT" // Order collection by creation time. +) + +// DeploymentState represents the possible states in which a deployment can be. +type DeploymentState string + +// The possible states in which a deployment can be. +const ( + DeploymentStateAbandoned DeploymentState = "ABANDONED" // The pending deployment was not updated after 30 minutes. + DeploymentStateActive DeploymentState = "ACTIVE" // The deployment is currently active. + DeploymentStateDestroyed DeploymentState = "DESTROYED" // An inactive transient deployment. + DeploymentStateError DeploymentState = "ERROR" // The deployment experienced an error. + DeploymentStateFailure DeploymentState = "FAILURE" // The deployment has failed. + DeploymentStateInactive DeploymentState = "INACTIVE" // The deployment is inactive. + DeploymentStatePending DeploymentState = "PENDING" // The deployment is pending. + DeploymentStateQueued DeploymentState = "QUEUED" // The deployment has queued. + DeploymentStateInProgress DeploymentState = "IN_PROGRESS" // The deployment is in progress. +) + +// DeploymentStatusState represents the possible states for a deployment status. +type DeploymentStatusState string + +// The possible states for a deployment status. +const ( + DeploymentStatusStatePending DeploymentStatusState = "PENDING" // The deployment is pending. 
+ DeploymentStatusStateSuccess DeploymentStatusState = "SUCCESS" // The deployment was successful. + DeploymentStatusStateFailure DeploymentStatusState = "FAILURE" // The deployment has failed. + DeploymentStatusStateInactive DeploymentStatusState = "INACTIVE" // The deployment is inactive. + DeploymentStatusStateError DeploymentStatusState = "ERROR" // The deployment experienced an error. + DeploymentStatusStateQueued DeploymentStatusState = "QUEUED" // The deployment is queued. + DeploymentStatusStateInProgress DeploymentStatusState = "IN_PROGRESS" // The deployment is in progress. +) + +// EnterpriseAdministratorInvitationOrderField represents properties by which enterprise administrator invitation connections can be ordered. +type EnterpriseAdministratorInvitationOrderField string + +// Properties by which enterprise administrator invitation connections can be ordered. +const ( + EnterpriseAdministratorInvitationOrderFieldCreatedAt EnterpriseAdministratorInvitationOrderField = "CREATED_AT" // Order enterprise administrator member invitations by creation time. +) + +// EnterpriseAdministratorRole represents the possible administrator roles in an enterprise account. +type EnterpriseAdministratorRole string + +// The possible administrator roles in an enterprise account. +const ( + EnterpriseAdministratorRoleOwner EnterpriseAdministratorRole = "OWNER" // Represents an owner of the enterprise account. + EnterpriseAdministratorRoleBillingManager EnterpriseAdministratorRole = "BILLING_MANAGER" // Represents a billing manager of the enterprise account. +) + +// EnterpriseDefaultRepositoryPermissionSettingValue represents the possible values for the enterprise default repository permission setting. +type EnterpriseDefaultRepositoryPermissionSettingValue string + +// The possible values for the enterprise default repository permission setting. +const ( + EnterpriseDefaultRepositoryPermissionSettingValueNoPolicy EnterpriseDefaultRepositoryPermissionSettingValue = "NO_POLICY" // Organizations in the enterprise choose default repository permissions for their members. + EnterpriseDefaultRepositoryPermissionSettingValueAdmin EnterpriseDefaultRepositoryPermissionSettingValue = "ADMIN" // Organization members will be able to clone, pull, push, and add new collaborators to all organization repositories. + EnterpriseDefaultRepositoryPermissionSettingValueWrite EnterpriseDefaultRepositoryPermissionSettingValue = "WRITE" // Organization members will be able to clone, pull, and push all organization repositories. + EnterpriseDefaultRepositoryPermissionSettingValueRead EnterpriseDefaultRepositoryPermissionSettingValue = "READ" // Organization members will be able to clone and pull all organization repositories. + EnterpriseDefaultRepositoryPermissionSettingValueNone EnterpriseDefaultRepositoryPermissionSettingValue = "NONE" // Organization members will only be able to clone and pull public repositories. +) + +// EnterpriseEnabledDisabledSettingValue represents the possible values for an enabled/disabled enterprise setting. +type EnterpriseEnabledDisabledSettingValue string + +// The possible values for an enabled/disabled enterprise setting. +const ( + EnterpriseEnabledDisabledSettingValueEnabled EnterpriseEnabledDisabledSettingValue = "ENABLED" // The setting is enabled for organizations in the enterprise. + EnterpriseEnabledDisabledSettingValueDisabled EnterpriseEnabledDisabledSettingValue = "DISABLED" // The setting is disabled for organizations in the enterprise. 
+ EnterpriseEnabledDisabledSettingValueNoPolicy EnterpriseEnabledDisabledSettingValue = "NO_POLICY" // There is no policy set for organizations in the enterprise. +) + +// EnterpriseEnabledSettingValue represents the possible values for an enabled/no policy enterprise setting. +type EnterpriseEnabledSettingValue string + +// The possible values for an enabled/no policy enterprise setting. +const ( + EnterpriseEnabledSettingValueEnabled EnterpriseEnabledSettingValue = "ENABLED" // The setting is enabled for organizations in the enterprise. + EnterpriseEnabledSettingValueNoPolicy EnterpriseEnabledSettingValue = "NO_POLICY" // There is no policy set for organizations in the enterprise. +) + +// EnterpriseMemberOrderField represents properties by which enterprise member connections can be ordered. +type EnterpriseMemberOrderField string + +// Properties by which enterprise member connections can be ordered. +const ( + EnterpriseMemberOrderFieldLogin EnterpriseMemberOrderField = "LOGIN" // Order enterprise members by login. + EnterpriseMemberOrderFieldCreatedAt EnterpriseMemberOrderField = "CREATED_AT" // Order enterprise members by creation time. +) + +// EnterpriseMembersCanCreateRepositoriesSettingValue represents the possible values for the enterprise members can create repositories setting. +type EnterpriseMembersCanCreateRepositoriesSettingValue string + +// The possible values for the enterprise members can create repositories setting. +const ( + EnterpriseMembersCanCreateRepositoriesSettingValueNoPolicy EnterpriseMembersCanCreateRepositoriesSettingValue = "NO_POLICY" // Organization administrators choose whether to allow members to create repositories. + EnterpriseMembersCanCreateRepositoriesSettingValueAll EnterpriseMembersCanCreateRepositoriesSettingValue = "ALL" // Members will be able to create public and private repositories. + EnterpriseMembersCanCreateRepositoriesSettingValuePublic EnterpriseMembersCanCreateRepositoriesSettingValue = "PUBLIC" // Members will be able to create only public repositories. + EnterpriseMembersCanCreateRepositoriesSettingValuePrivate EnterpriseMembersCanCreateRepositoriesSettingValue = "PRIVATE" // Members will be able to create only private repositories. + EnterpriseMembersCanCreateRepositoriesSettingValueDisabled EnterpriseMembersCanCreateRepositoriesSettingValue = "DISABLED" // Members will not be able to create public or private repositories. +) + +// EnterpriseMembersCanMakePurchasesSettingValue represents the possible values for the members can make purchases setting. +type EnterpriseMembersCanMakePurchasesSettingValue string + +// The possible values for the members can make purchases setting. +const ( + EnterpriseMembersCanMakePurchasesSettingValueEnabled EnterpriseMembersCanMakePurchasesSettingValue = "ENABLED" // The setting is enabled for organizations in the enterprise. + EnterpriseMembersCanMakePurchasesSettingValueDisabled EnterpriseMembersCanMakePurchasesSettingValue = "DISABLED" // The setting is disabled for organizations in the enterprise. +) + +// EnterpriseMembershipType represents the possible values we have for filtering Platform::Objects::User#enterprises. +type EnterpriseMembershipType string + +// The possible values we have for filtering Platform::Objects::User#enterprises. +const ( + EnterpriseMembershipTypeAll EnterpriseMembershipType = "ALL" // Returns all enterprises in which the user is a member, admin, or billing manager. 
+ EnterpriseMembershipTypeAdmin EnterpriseMembershipType = "ADMIN" // Returns all enterprises in which the user is an admin. + EnterpriseMembershipTypeBillingManager EnterpriseMembershipType = "BILLING_MANAGER" // Returns all enterprises in which the user is a billing manager. + EnterpriseMembershipTypeOrgMembership EnterpriseMembershipType = "ORG_MEMBERSHIP" // Returns all enterprises in which the user is a member of an org that is owned by the enterprise. +) + +// EnterpriseOrderField represents properties by which enterprise connections can be ordered. +type EnterpriseOrderField string + +// Properties by which enterprise connections can be ordered. +const ( + EnterpriseOrderFieldName EnterpriseOrderField = "NAME" // Order enterprises by name. +) + +// EnterpriseServerInstallationOrderField represents properties by which Enterprise Server installation connections can be ordered. +type EnterpriseServerInstallationOrderField string + +// Properties by which Enterprise Server installation connections can be ordered. +const ( + EnterpriseServerInstallationOrderFieldHostName EnterpriseServerInstallationOrderField = "HOST_NAME" // Order Enterprise Server installations by host name. + EnterpriseServerInstallationOrderFieldCustomerName EnterpriseServerInstallationOrderField = "CUSTOMER_NAME" // Order Enterprise Server installations by customer name. + EnterpriseServerInstallationOrderFieldCreatedAt EnterpriseServerInstallationOrderField = "CREATED_AT" // Order Enterprise Server installations by creation time. +) + +// EnterpriseServerUserAccountEmailOrderField represents properties by which Enterprise Server user account email connections can be ordered. +type EnterpriseServerUserAccountEmailOrderField string + +// Properties by which Enterprise Server user account email connections can be ordered. +const ( + EnterpriseServerUserAccountEmailOrderFieldEmail EnterpriseServerUserAccountEmailOrderField = "EMAIL" // Order emails by email. +) + +// EnterpriseServerUserAccountOrderField represents properties by which Enterprise Server user account connections can be ordered. +type EnterpriseServerUserAccountOrderField string + +// Properties by which Enterprise Server user account connections can be ordered. +const ( + EnterpriseServerUserAccountOrderFieldLogin EnterpriseServerUserAccountOrderField = "LOGIN" // Order user accounts by login. + EnterpriseServerUserAccountOrderFieldRemoteCreatedAt EnterpriseServerUserAccountOrderField = "REMOTE_CREATED_AT" // Order user accounts by creation time on the Enterprise Server installation. +) + +// EnterpriseServerUserAccountsUploadOrderField represents properties by which Enterprise Server user accounts upload connections can be ordered. +type EnterpriseServerUserAccountsUploadOrderField string + +// Properties by which Enterprise Server user accounts upload connections can be ordered. +const ( + EnterpriseServerUserAccountsUploadOrderFieldCreatedAt EnterpriseServerUserAccountsUploadOrderField = "CREATED_AT" // Order user accounts uploads by creation time. +) + +// EnterpriseServerUserAccountsUploadSyncState represents synchronization state of the Enterprise Server user accounts upload. +type EnterpriseServerUserAccountsUploadSyncState string + +// Synchronization state of the Enterprise Server user accounts upload. +const ( + EnterpriseServerUserAccountsUploadSyncStatePending EnterpriseServerUserAccountsUploadSyncState = "PENDING" // The synchronization of the upload is pending. 
+ EnterpriseServerUserAccountsUploadSyncStateSuccess EnterpriseServerUserAccountsUploadSyncState = "SUCCESS" // The synchronization of the upload succeeded. + EnterpriseServerUserAccountsUploadSyncStateFailure EnterpriseServerUserAccountsUploadSyncState = "FAILURE" // The synchronization of the upload failed. +) + +// EnterpriseUserAccountMembershipRole represents the possible roles for enterprise membership. +type EnterpriseUserAccountMembershipRole string + +// The possible roles for enterprise membership. +const ( + EnterpriseUserAccountMembershipRoleMember EnterpriseUserAccountMembershipRole = "MEMBER" // The user is a member of the enterprise membership. + EnterpriseUserAccountMembershipRoleOwner EnterpriseUserAccountMembershipRole = "OWNER" // The user is an owner of the enterprise membership. +) + +// EnterpriseUserDeployment represents the possible GitHub Enterprise deployments where this user can exist. +type EnterpriseUserDeployment string + +// The possible GitHub Enterprise deployments where this user can exist. +const ( + EnterpriseUserDeploymentCloud EnterpriseUserDeployment = "CLOUD" // The user is part of a GitHub Enterprise Cloud deployment. + EnterpriseUserDeploymentServer EnterpriseUserDeployment = "SERVER" // The user is part of a GitHub Enterprise Server deployment. +) + +// FundingPlatform represents the possible funding platforms for repository funding links. +type FundingPlatform string + +// The possible funding platforms for repository funding links. +const ( + FundingPlatformGitHub FundingPlatform = "GITHUB" // GitHub funding platform. + FundingPlatformPatreon FundingPlatform = "PATREON" // Patreon funding platform. + FundingPlatformOpenCollective FundingPlatform = "OPEN_COLLECTIVE" // Open Collective funding platform. + FundingPlatformKoFi FundingPlatform = "KO_FI" // Ko-fi funding platform. + FundingPlatformTidelift FundingPlatform = "TIDELIFT" // Tidelift funding platform. + FundingPlatformCommunityBridge FundingPlatform = "COMMUNITY_BRIDGE" // Community Bridge funding platform. + FundingPlatformLiberapay FundingPlatform = "LIBERAPAY" // Liberapay funding platform. + FundingPlatformIssuehunt FundingPlatform = "ISSUEHUNT" // IssueHunt funding platform. + FundingPlatformOtechie FundingPlatform = "OTECHIE" // Otechie funding platform. + FundingPlatformCustom FundingPlatform = "CUSTOM" // Custom funding platform. +) + +// GistOrderField represents properties by which gist connections can be ordered. +type GistOrderField string + +// Properties by which gist connections can be ordered. +const ( + GistOrderFieldCreatedAt GistOrderField = "CREATED_AT" // Order gists by creation time. + GistOrderFieldUpdatedAt GistOrderField = "UPDATED_AT" // Order gists by update time. + GistOrderFieldPushedAt GistOrderField = "PUSHED_AT" // Order gists by push time. +) + +// GistPrivacy represents the privacy of a Gist. +type GistPrivacy string + +// The privacy of a Gist. +const ( + GistPrivacyPublic GistPrivacy = "PUBLIC" // Public. + GistPrivacySecret GistPrivacy = "SECRET" // Secret. + GistPrivacyAll GistPrivacy = "ALL" // Gists that are public and secret. +) + +// GitSignatureState represents the state of a Git signature. +type GitSignatureState string + +// The state of a Git signature. +const ( + GitSignatureStateValid GitSignatureState = "VALID" // Valid signature and verified by GitHub. + GitSignatureStateInvalid GitSignatureState = "INVALID" // Invalid signature. + GitSignatureStateMalformedSig GitSignatureState = "MALFORMED_SIG" // Malformed signature. 
+ GitSignatureStateUnknownKey GitSignatureState = "UNKNOWN_KEY" // Key used for signing not known to GitHub. + GitSignatureStateBadEmail GitSignatureState = "BAD_EMAIL" // Invalid email used for signing. + GitSignatureStateUnverifiedEmail GitSignatureState = "UNVERIFIED_EMAIL" // Email used for signing unverified on GitHub. + GitSignatureStateNoUser GitSignatureState = "NO_USER" // Email used for signing not known to GitHub. + GitSignatureStateUnknownSigType GitSignatureState = "UNKNOWN_SIG_TYPE" // Unknown signature type. + GitSignatureStateUnsigned GitSignatureState = "UNSIGNED" // Unsigned. + GitSignatureStateGpgverifyUnavailable GitSignatureState = "GPGVERIFY_UNAVAILABLE" // Internal error - the GPG verification service is unavailable at the moment. + GitSignatureStateGpgverifyError GitSignatureState = "GPGVERIFY_ERROR" // Internal error - the GPG verification service misbehaved. + GitSignatureStateNotSigningKey GitSignatureState = "NOT_SIGNING_KEY" // The usage flags for the key that signed this don't allow signing. + GitSignatureStateExpiredKey GitSignatureState = "EXPIRED_KEY" // Signing key expired. + GitSignatureStateOcspPending GitSignatureState = "OCSP_PENDING" // Valid signature, pending certificate revocation checking. + GitSignatureStateOcspError GitSignatureState = "OCSP_ERROR" // Valid siganture, though certificate revocation check failed. + GitSignatureStateBadCert GitSignatureState = "BAD_CERT" // The signing certificate or its chain could not be verified. + GitSignatureStateOcspRevoked GitSignatureState = "OCSP_REVOKED" // One or more certificates in chain has been revoked. +) + +// IdentityProviderConfigurationState represents the possible states in which authentication can be configured with an identity provider. +type IdentityProviderConfigurationState string + +// The possible states in which authentication can be configured with an identity provider. +const ( + IdentityProviderConfigurationStateEnforced IdentityProviderConfigurationState = "ENFORCED" // Authentication with an identity provider is configured and enforced. + IdentityProviderConfigurationStateConfigured IdentityProviderConfigurationState = "CONFIGURED" // Authentication with an identity provider is configured but not enforced. + IdentityProviderConfigurationStateUnconfigured IdentityProviderConfigurationState = "UNCONFIGURED" // Authentication with an identity provider is not configured. +) + +// IssueOrderField represents properties by which issue connections can be ordered. +type IssueOrderField string + +// Properties by which issue connections can be ordered. +const ( + IssueOrderFieldCreatedAt IssueOrderField = "CREATED_AT" // Order issues by creation time. + IssueOrderFieldUpdatedAt IssueOrderField = "UPDATED_AT" // Order issues by update time. + IssueOrderFieldComments IssueOrderField = "COMMENTS" // Order issues by comment count. +) + +// IssuePubSubTopic represents the possible PubSub channels for an issue. +type IssuePubSubTopic string + +// The possible PubSub channels for an issue. +const ( + IssuePubSubTopicUpdated IssuePubSubTopic = "UPDATED" // The channel ID for observing issue updates. + IssuePubSubTopicMarkasread IssuePubSubTopic = "MARKASREAD" // The channel ID for marking an issue as read. + IssuePubSubTopicTimeline IssuePubSubTopic = "TIMELINE" // The channel ID for updating items on the issue timeline. + IssuePubSubTopicState IssuePubSubTopic = "STATE" // The channel ID for observing issue state updates. +) + +// IssueState represents the possible states of an issue. 
+type IssueState string + +// The possible states of an issue. +const ( + IssueStateOpen IssueState = "OPEN" // An issue that is still open. + IssueStateClosed IssueState = "CLOSED" // An issue that has been closed. +) + +// IssueTimelineItemsItemType represents the possible item types found in a timeline. +type IssueTimelineItemsItemType string + +// The possible item types found in a timeline. +const ( + IssueTimelineItemsItemTypeIssueComment IssueTimelineItemsItemType = "ISSUE_COMMENT" // Represents a comment on an Issue. + IssueTimelineItemsItemTypeCrossReferencedEvent IssueTimelineItemsItemType = "CROSS_REFERENCED_EVENT" // Represents a mention made by one issue or pull request to another. + IssueTimelineItemsItemTypeAddedToProjectEvent IssueTimelineItemsItemType = "ADDED_TO_PROJECT_EVENT" // Represents a 'added_to_project' event on a given issue or pull request. + IssueTimelineItemsItemTypeAssignedEvent IssueTimelineItemsItemType = "ASSIGNED_EVENT" // Represents an 'assigned' event on any assignable object. + IssueTimelineItemsItemTypeClosedEvent IssueTimelineItemsItemType = "CLOSED_EVENT" // Represents a 'closed' event on any `Closable`. + IssueTimelineItemsItemTypeCommentDeletedEvent IssueTimelineItemsItemType = "COMMENT_DELETED_EVENT" // Represents a 'comment_deleted' event on a given issue or pull request. + IssueTimelineItemsItemTypeConvertedNoteToIssueEvent IssueTimelineItemsItemType = "CONVERTED_NOTE_TO_ISSUE_EVENT" // Represents a 'converted_note_to_issue' event on a given issue or pull request. + IssueTimelineItemsItemTypeDemilestonedEvent IssueTimelineItemsItemType = "DEMILESTONED_EVENT" // Represents a 'demilestoned' event on a given issue or pull request. + IssueTimelineItemsItemTypeLabeledEvent IssueTimelineItemsItemType = "LABELED_EVENT" // Represents a 'labeled' event on a given issue or pull request. + IssueTimelineItemsItemTypeLockedEvent IssueTimelineItemsItemType = "LOCKED_EVENT" // Represents a 'locked' event on a given issue or pull request. + IssueTimelineItemsItemTypeMarkedAsDuplicateEvent IssueTimelineItemsItemType = "MARKED_AS_DUPLICATE_EVENT" // Represents a 'marked_as_duplicate' event on a given issue or pull request. + IssueTimelineItemsItemTypeMentionedEvent IssueTimelineItemsItemType = "MENTIONED_EVENT" // Represents a 'mentioned' event on a given issue or pull request. + IssueTimelineItemsItemTypeMilestonedEvent IssueTimelineItemsItemType = "MILESTONED_EVENT" // Represents a 'milestoned' event on a given issue or pull request. + IssueTimelineItemsItemTypeMovedColumnsInProjectEvent IssueTimelineItemsItemType = "MOVED_COLUMNS_IN_PROJECT_EVENT" // Represents a 'moved_columns_in_project' event on a given issue or pull request. + IssueTimelineItemsItemTypePinnedEvent IssueTimelineItemsItemType = "PINNED_EVENT" // Represents a 'pinned' event on a given issue or pull request. + IssueTimelineItemsItemTypeReferencedEvent IssueTimelineItemsItemType = "REFERENCED_EVENT" // Represents a 'referenced' event on a given `ReferencedSubject`. + IssueTimelineItemsItemTypeRemovedFromProjectEvent IssueTimelineItemsItemType = "REMOVED_FROM_PROJECT_EVENT" // Represents a 'removed_from_project' event on a given issue or pull request. + IssueTimelineItemsItemTypeRenamedTitleEvent IssueTimelineItemsItemType = "RENAMED_TITLE_EVENT" // Represents a 'renamed' event on a given issue or pull request. + IssueTimelineItemsItemTypeReopenedEvent IssueTimelineItemsItemType = "REOPENED_EVENT" // Represents a 'reopened' event on any `Closable`. 
+ IssueTimelineItemsItemTypeSubscribedEvent IssueTimelineItemsItemType = "SUBSCRIBED_EVENT" // Represents a 'subscribed' event on a given `Subscribable`. + IssueTimelineItemsItemTypeTransferredEvent IssueTimelineItemsItemType = "TRANSFERRED_EVENT" // Represents a 'transferred' event on a given issue or pull request. + IssueTimelineItemsItemTypeUnassignedEvent IssueTimelineItemsItemType = "UNASSIGNED_EVENT" // Represents an 'unassigned' event on any assignable object. + IssueTimelineItemsItemTypeUnlabeledEvent IssueTimelineItemsItemType = "UNLABELED_EVENT" // Represents an 'unlabeled' event on a given issue or pull request. + IssueTimelineItemsItemTypeUnlockedEvent IssueTimelineItemsItemType = "UNLOCKED_EVENT" // Represents an 'unlocked' event on a given issue or pull request. + IssueTimelineItemsItemTypeUserBlockedEvent IssueTimelineItemsItemType = "USER_BLOCKED_EVENT" // Represents a 'user_blocked' event on a given user. + IssueTimelineItemsItemTypeUnpinnedEvent IssueTimelineItemsItemType = "UNPINNED_EVENT" // Represents an 'unpinned' event on a given issue or pull request. + IssueTimelineItemsItemTypeUnsubscribedEvent IssueTimelineItemsItemType = "UNSUBSCRIBED_EVENT" // Represents an 'unsubscribed' event on a given `Subscribable`. +) + +// LanguageOrderField represents properties by which language connections can be ordered. +type LanguageOrderField string + +// Properties by which language connections can be ordered. +const ( + LanguageOrderFieldSize LanguageOrderField = "SIZE" // Order languages by the size of all files containing the language. +) + +// LockReason represents the possible reasons that an issue or pull request was locked. +type LockReason string + +// The possible reasons that an issue or pull request was locked. +const ( + LockReasonOffTopic LockReason = "OFF_TOPIC" // The issue or pull request was locked because the conversation was off-topic. + LockReasonTooHeated LockReason = "TOO_HEATED" // The issue or pull request was locked because the conversation was too heated. + LockReasonResolved LockReason = "RESOLVED" // The issue or pull request was locked because the conversation was resolved. + LockReasonSpam LockReason = "SPAM" // The issue or pull request was locked because the conversation was spam. +) + +// MergeableState represents whether or not a PullRequest can be merged. +type MergeableState string + +// Whether or not a PullRequest can be merged. +const ( + MergeableStateMergeable MergeableState = "MERGEABLE" // The pull request can be merged. + MergeableStateConflicting MergeableState = "CONFLICTING" // The pull request cannot be merged due to merge conflicts. + MergeableStateUnknown MergeableState = "UNKNOWN" // The mergeability of the pull request is still being calculated. +) + +// MilestoneOrderField represents properties by which milestone connections can be ordered. +type MilestoneOrderField string + +// Properties by which milestone connections can be ordered. +const ( + MilestoneOrderFieldDueDate MilestoneOrderField = "DUE_DATE" // Order milestones by when they are due. + MilestoneOrderFieldCreatedAt MilestoneOrderField = "CREATED_AT" // Order milestones by when they were created. + MilestoneOrderFieldUpdatedAt MilestoneOrderField = "UPDATED_AT" // Order milestones by when they were last updated. + MilestoneOrderFieldNumber MilestoneOrderField = "NUMBER" // Order milestones by their number. +) + +// MilestoneState represents the possible states of a milestone. +type MilestoneState string + +// The possible states of a milestone. 
+const ( + MilestoneStateOpen MilestoneState = "OPEN" // A milestone that is still open. + MilestoneStateClosed MilestoneState = "CLOSED" // A milestone that has been closed. +) + +// OauthApplicationCreateAuditEntryState represents the state of an OAuth Application when it was created. +type OauthApplicationCreateAuditEntryState string + +// The state of an OAuth Application when it was created. +const ( + OauthApplicationCreateAuditEntryStateActive OauthApplicationCreateAuditEntryState = "ACTIVE" // The OAuth Application was active and allowed to have OAuth Accesses. + OauthApplicationCreateAuditEntryStateSuspended OauthApplicationCreateAuditEntryState = "SUSPENDED" // The OAuth Application was suspended from generating OAuth Accesses due to abuse or security concerns. + OauthApplicationCreateAuditEntryStatePendingDeletion OauthApplicationCreateAuditEntryState = "PENDING_DELETION" // The OAuth Application was in the process of being deleted. +) + +// OauthApplicationRevokeTokensAuditEntryState represents the state of an OAuth Application when its tokens were revoked. +type OauthApplicationRevokeTokensAuditEntryState string + +// The state of an OAuth Application when its tokens were revoked. +const ( + OauthApplicationRevokeTokensAuditEntryStateActive OauthApplicationRevokeTokensAuditEntryState = "ACTIVE" // The OAuth Application was active and allowed to have OAuth Accesses. + OauthApplicationRevokeTokensAuditEntryStateSuspended OauthApplicationRevokeTokensAuditEntryState = "SUSPENDED" // The OAuth Application was suspended from generating OAuth Accesses due to abuse or security concerns. + OauthApplicationRevokeTokensAuditEntryStatePendingDeletion OauthApplicationRevokeTokensAuditEntryState = "PENDING_DELETION" // The OAuth Application was in the process of being deleted. +) + +// OperationType represents the corresponding operation type for the action. +type OperationType string + +// The corresponding operation type for the action. +const ( + OperationTypeAccess OperationType = "ACCESS" // An existing resource was accessed. + OperationTypeAuthentication OperationType = "AUTHENTICATION" // A resource performed an authentication event. + OperationTypeCreate OperationType = "CREATE" // A new resource was created. + OperationTypeModify OperationType = "MODIFY" // An existing resource was modified. + OperationTypeRemove OperationType = "REMOVE" // An existing resource was removed. + OperationTypeRestore OperationType = "RESTORE" // An existing resource was restored. + OperationTypeTransfer OperationType = "TRANSFER" // An existing resource was transferred between multiple resources. +) + +// OrderDirection represents possible directions in which to order a list of items when provided an `orderBy` argument. +type OrderDirection string + +// Possible directions in which to order a list of items when provided an `orderBy` argument. +const ( + OrderDirectionAsc OrderDirection = "ASC" // Specifies an ascending order for a given `orderBy` argument. + OrderDirectionDesc OrderDirection = "DESC" // Specifies a descending order for a given `orderBy` argument. +) + +// OrgAddMemberAuditEntryPermission represents the permissions available to members on an Organization. +type OrgAddMemberAuditEntryPermission string + +// The permissions available to members on an Organization. +const ( + OrgAddMemberAuditEntryPermissionRead OrgAddMemberAuditEntryPermission = "READ" // Can read and clone repositories. 
+ OrgAddMemberAuditEntryPermissionAdmin OrgAddMemberAuditEntryPermission = "ADMIN" // Can read, clone, push, and add collaborators to repositories. +) + +// OrgCreateAuditEntryBillingPlan represents the billing plans available for organizations. +type OrgCreateAuditEntryBillingPlan string + +// The billing plans available for organizations. +const ( + OrgCreateAuditEntryBillingPlanFree OrgCreateAuditEntryBillingPlan = "FREE" // Free Plan. + OrgCreateAuditEntryBillingPlanBusiness OrgCreateAuditEntryBillingPlan = "BUSINESS" // Team Plan. + OrgCreateAuditEntryBillingPlanBusinessPlus OrgCreateAuditEntryBillingPlan = "BUSINESS_PLUS" // Enterprise Cloud Plan. + OrgCreateAuditEntryBillingPlanUnlimited OrgCreateAuditEntryBillingPlan = "UNLIMITED" // Legacy Unlimited Plan. + OrgCreateAuditEntryBillingPlanTieredPerSeat OrgCreateAuditEntryBillingPlan = "TIERED_PER_SEAT" // Tiered Per Seat Plan. +) + +// OrgRemoveBillingManagerAuditEntryReason represents the reason a billing manager was removed from an Organization. +type OrgRemoveBillingManagerAuditEntryReason string + +// The reason a billing manager was removed from an Organization. +const ( + OrgRemoveBillingManagerAuditEntryReasonTwoFactorRequirementNonCompliance OrgRemoveBillingManagerAuditEntryReason = "TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE" // The organization required 2FA of its billing managers and this user did not have 2FA enabled. + OrgRemoveBillingManagerAuditEntryReasonSamlExternalIdentityMissing OrgRemoveBillingManagerAuditEntryReason = "SAML_EXTERNAL_IDENTITY_MISSING" // SAML external identity missing. + OrgRemoveBillingManagerAuditEntryReasonSamlSsoEnforcementRequiresExternalIdentity OrgRemoveBillingManagerAuditEntryReason = "SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY" // SAML SSO enforcement requires an external identity. +) + +// OrgRemoveMemberAuditEntryMembershipType represents the type of membership a user has with an Organization. +type OrgRemoveMemberAuditEntryMembershipType string + +// The type of membership a user has with an Organization. +const ( + OrgRemoveMemberAuditEntryMembershipTypeDirectMember OrgRemoveMemberAuditEntryMembershipType = "DIRECT_MEMBER" // A direct member is a user that is a member of the Organization. + OrgRemoveMemberAuditEntryMembershipTypeAdmin OrgRemoveMemberAuditEntryMembershipType = "ADMIN" // Organization administrators have full access and can change several settings, including the names of repositories that belong to the Organization and Owners team membership. In addition, organization admins can delete the organization and all of its repositories. + OrgRemoveMemberAuditEntryMembershipTypeBillingManager OrgRemoveMemberAuditEntryMembershipType = "BILLING_MANAGER" // A billing manager is a user who manages the billing settings for the Organization, such as updating payment information. + OrgRemoveMemberAuditEntryMembershipTypeUnaffiliated OrgRemoveMemberAuditEntryMembershipType = "UNAFFILIATED" // An unaffiliated collaborator is a person who is not a member of the Organization and does not have access to any repositories in the Organization. + OrgRemoveMemberAuditEntryMembershipTypeOutsideCollaborator OrgRemoveMemberAuditEntryMembershipType = "OUTSIDE_COLLABORATOR" // An outside collaborator is a person who isn't explicitly a member of the Organization, but who has Read, Write, or Admin permissions to one or more repositories in the organization. +) + +// OrgRemoveMemberAuditEntryReason represents the reason a member was removed from an Organization. 
+type OrgRemoveMemberAuditEntryReason string + +// The reason a member was removed from an Organization. +const ( + OrgRemoveMemberAuditEntryReasonTwoFactorRequirementNonCompliance OrgRemoveMemberAuditEntryReason = "TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE" // The organization required 2FA of its billing managers and this user did not have 2FA enabled. + OrgRemoveMemberAuditEntryReasonSamlExternalIdentityMissing OrgRemoveMemberAuditEntryReason = "SAML_EXTERNAL_IDENTITY_MISSING" // SAML external identity missing. + OrgRemoveMemberAuditEntryReasonSamlSsoEnforcementRequiresExternalIdentity OrgRemoveMemberAuditEntryReason = "SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY" // SAML SSO enforcement requires an external identity. +) + +// OrgRemoveOutsideCollaboratorAuditEntryMembershipType represents the type of membership a user has with an Organization. +type OrgRemoveOutsideCollaboratorAuditEntryMembershipType string + +// The type of membership a user has with an Organization. +const ( + OrgRemoveOutsideCollaboratorAuditEntryMembershipTypeOutsideCollaborator OrgRemoveOutsideCollaboratorAuditEntryMembershipType = "OUTSIDE_COLLABORATOR" // An outside collaborator is a person who isn't explicitly a member of the Organization, but who has Read, Write, or Admin permissions to one or more repositories in the organization. + OrgRemoveOutsideCollaboratorAuditEntryMembershipTypeUnaffiliated OrgRemoveOutsideCollaboratorAuditEntryMembershipType = "UNAFFILIATED" // An unaffiliated collaborator is a person who is not a member of the Organization and does not have access to any repositories in the organization. + OrgRemoveOutsideCollaboratorAuditEntryMembershipTypeBillingManager OrgRemoveOutsideCollaboratorAuditEntryMembershipType = "BILLING_MANAGER" // A billing manager is a user who manages the billing settings for the Organization, such as updating payment information. +) + +// OrgRemoveOutsideCollaboratorAuditEntryReason represents the reason an outside collaborator was removed from an Organization. +type OrgRemoveOutsideCollaboratorAuditEntryReason string + +// The reason an outside collaborator was removed from an Organization. +const ( + OrgRemoveOutsideCollaboratorAuditEntryReasonTwoFactorRequirementNonCompliance OrgRemoveOutsideCollaboratorAuditEntryReason = "TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE" // The organization required 2FA of its billing managers and this user did not have 2FA enabled. + OrgRemoveOutsideCollaboratorAuditEntryReasonSamlExternalIdentityMissing OrgRemoveOutsideCollaboratorAuditEntryReason = "SAML_EXTERNAL_IDENTITY_MISSING" // SAML external identity missing. +) + +// OrgUpdateDefaultRepositoryPermissionAuditEntryPermission represents the default permission a repository can have in an Organization. +type OrgUpdateDefaultRepositoryPermissionAuditEntryPermission string + +// The default permission a repository can have in an Organization. +const ( + OrgUpdateDefaultRepositoryPermissionAuditEntryPermissionRead OrgUpdateDefaultRepositoryPermissionAuditEntryPermission = "READ" // Can read and clone repositories. + OrgUpdateDefaultRepositoryPermissionAuditEntryPermissionWrite OrgUpdateDefaultRepositoryPermissionAuditEntryPermission = "WRITE" // Can read, clone and push to repositories. + OrgUpdateDefaultRepositoryPermissionAuditEntryPermissionAdmin OrgUpdateDefaultRepositoryPermissionAuditEntryPermission = "ADMIN" // Can read, clone, push, and add collaborators to repositories. 
+ OrgUpdateDefaultRepositoryPermissionAuditEntryPermissionNone OrgUpdateDefaultRepositoryPermissionAuditEntryPermission = "NONE" // No default permission value. +) + +// OrgUpdateMemberAuditEntryPermission represents the permissions available to members on an Organization. +type OrgUpdateMemberAuditEntryPermission string + +// The permissions available to members on an Organization. +const ( + OrgUpdateMemberAuditEntryPermissionRead OrgUpdateMemberAuditEntryPermission = "READ" // Can read and clone repositories. + OrgUpdateMemberAuditEntryPermissionAdmin OrgUpdateMemberAuditEntryPermission = "ADMIN" // Can read, clone, push, and add collaborators to repositories. +) + +// OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility represents the permissions available for repository creation on an Organization. +type OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility string + +// The permissions available for repository creation on an Organization. +const ( + OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibilityAll OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility = "ALL" // All organization members are restricted from creating any repositories. + OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibilityPublic OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility = "PUBLIC" // All organization members are restricted from creating public repositories. +) + +// OrganizationInvitationRole represents the possible organization invitation roles. +type OrganizationInvitationRole string + +// The possible organization invitation roles. +const ( + OrganizationInvitationRoleDirectMember OrganizationInvitationRole = "DIRECT_MEMBER" // The user is invited to be a direct member of the organization. + OrganizationInvitationRoleAdmin OrganizationInvitationRole = "ADMIN" // The user is invited to be an admin of the organization. + OrganizationInvitationRoleBillingManager OrganizationInvitationRole = "BILLING_MANAGER" // The user is invited to be a billing manager of the organization. + OrganizationInvitationRoleReinstate OrganizationInvitationRole = "REINSTATE" // The user's previous role will be reinstated. +) + +// OrganizationInvitationType represents the possible organization invitation types. +type OrganizationInvitationType string + +// The possible organization invitation types. +const ( + OrganizationInvitationTypeUser OrganizationInvitationType = "USER" // The invitation was to an existing user. + OrganizationInvitationTypeEmail OrganizationInvitationType = "EMAIL" // The invitation was to an email address. +) + +// OrganizationMemberRole represents the possible roles within an organization for its members. +type OrganizationMemberRole string + +// The possible roles within an organization for its members. +const ( + OrganizationMemberRoleMember OrganizationMemberRole = "MEMBER" // The user is a member of the organization. + OrganizationMemberRoleAdmin OrganizationMemberRole = "ADMIN" // The user is an administrator of the organization. +) + +// OrganizationMembersCanCreateRepositoriesSettingValue represents the possible values for the members can create repositories setting on an organization. +type OrganizationMembersCanCreateRepositoriesSettingValue string + +// The possible values for the members can create repositories setting on an organization. 
+const ( + OrganizationMembersCanCreateRepositoriesSettingValueAll OrganizationMembersCanCreateRepositoriesSettingValue = "ALL" // Members will be able to create public and private repositories. + OrganizationMembersCanCreateRepositoriesSettingValuePrivate OrganizationMembersCanCreateRepositoriesSettingValue = "PRIVATE" // Members will be able to create only private repositories. + OrganizationMembersCanCreateRepositoriesSettingValueDisabled OrganizationMembersCanCreateRepositoriesSettingValue = "DISABLED" // Members will not be able to create public or private repositories. +) + +// OrganizationOrderField represents properties by which organization connections can be ordered. +type OrganizationOrderField string + +// Properties by which organization connections can be ordered. +const ( + OrganizationOrderFieldCreatedAt OrganizationOrderField = "CREATED_AT" // Order organizations by creation time. + OrganizationOrderFieldLogin OrganizationOrderField = "LOGIN" // Order organizations by login. +) + +// PinnableItemType represents represents items that can be pinned to a profile page or dashboard. +type PinnableItemType string + +// Represents items that can be pinned to a profile page or dashboard. +const ( + PinnableItemTypeRepository PinnableItemType = "REPOSITORY" // A repository. + PinnableItemTypeGist PinnableItemType = "GIST" // A gist. + PinnableItemTypeIssue PinnableItemType = "ISSUE" // An issue. + PinnableItemTypeProject PinnableItemType = "PROJECT" // A project. + PinnableItemTypePullRequest PinnableItemType = "PULL_REQUEST" // A pull request. + PinnableItemTypeUser PinnableItemType = "USER" // A user. + PinnableItemTypeOrganization PinnableItemType = "ORGANIZATION" // An organization. + PinnableItemTypeTeam PinnableItemType = "TEAM" // A team. +) + +// ProjectCardArchivedState represents the possible archived states of a project card. +type ProjectCardArchivedState string + +// The possible archived states of a project card. +const ( + ProjectCardArchivedStateArchived ProjectCardArchivedState = "ARCHIVED" // A project card that is archived. + ProjectCardArchivedStateNotArchived ProjectCardArchivedState = "NOT_ARCHIVED" // A project card that is not archived. +) + +// ProjectCardState represents various content states of a ProjectCard. +type ProjectCardState string + +// Various content states of a ProjectCard. +const ( + ProjectCardStateContentOnly ProjectCardState = "CONTENT_ONLY" // The card has content only. + ProjectCardStateNoteOnly ProjectCardState = "NOTE_ONLY" // The card has a note only. + ProjectCardStateRedacted ProjectCardState = "REDACTED" // The card is redacted. +) + +// ProjectColumnPurpose represents the semantic purpose of the column - todo, in progress, or done. +type ProjectColumnPurpose string + +// The semantic purpose of the column - todo, in progress, or done. +const ( + ProjectColumnPurposeTodo ProjectColumnPurpose = "TODO" // The column contains cards still to be worked on. + ProjectColumnPurposeInProgress ProjectColumnPurpose = "IN_PROGRESS" // The column contains cards which are currently being worked on. + ProjectColumnPurposeDone ProjectColumnPurpose = "DONE" // The column contains cards which are complete. +) + +// ProjectOrderField represents properties by which project connections can be ordered. +type ProjectOrderField string + +// Properties by which project connections can be ordered. +const ( + ProjectOrderFieldCreatedAt ProjectOrderField = "CREATED_AT" // Order projects by creation time. 
+ ProjectOrderFieldUpdatedAt ProjectOrderField = "UPDATED_AT" // Order projects by update time. + ProjectOrderFieldName ProjectOrderField = "NAME" // Order projects by name. +) + +// ProjectState represents state of the project; either 'open' or 'closed'. +type ProjectState string + +// State of the project; either 'open' or 'closed'. +const ( + ProjectStateOpen ProjectState = "OPEN" // The project is open. + ProjectStateClosed ProjectState = "CLOSED" // The project is closed. +) + +// ProjectTemplate represents gitHub-provided templates for Projects. +type ProjectTemplate string + +// GitHub-provided templates for Projects. +const ( + ProjectTemplateBasicKanban ProjectTemplate = "BASIC_KANBAN" // Create a board with columns for To do, In progress and Done. + ProjectTemplateAutomatedKanbanV2 ProjectTemplate = "AUTOMATED_KANBAN_V2" // Create a board with v2 triggers to automatically move cards across To do, In progress and Done columns. + ProjectTemplateAutomatedReviewsKanban ProjectTemplate = "AUTOMATED_REVIEWS_KANBAN" // Create a board with triggers to automatically move cards across columns with review automation. + ProjectTemplateBugTriage ProjectTemplate = "BUG_TRIAGE" // Create a board to triage and prioritize bugs with To do, priority, and Done columns. +) + +// PullRequestMergeMethod represents represents available types of methods to use when merging a pull request. +type PullRequestMergeMethod string + +// Represents available types of methods to use when merging a pull request. +const ( + PullRequestMergeMethodMerge PullRequestMergeMethod = "MERGE" // Add all commits from the head branch to the base branch with a merge commit. + PullRequestMergeMethodSquash PullRequestMergeMethod = "SQUASH" // Combine all commits from the head branch into a single commit in the base branch. + PullRequestMergeMethodRebase PullRequestMergeMethod = "REBASE" // Add all commits from the head branch onto the base branch individually. +) + +// PullRequestOrderField represents properties by which pull_requests connections can be ordered. +type PullRequestOrderField string + +// Properties by which pull_requests connections can be ordered. +const ( + PullRequestOrderFieldCreatedAt PullRequestOrderField = "CREATED_AT" // Order pull_requests by creation time. + PullRequestOrderFieldUpdatedAt PullRequestOrderField = "UPDATED_AT" // Order pull_requests by update time. +) + +// PullRequestPubSubTopic represents the possible PubSub channels for a pull request. +type PullRequestPubSubTopic string + +// The possible PubSub channels for a pull request. +const ( + PullRequestPubSubTopicUpdated PullRequestPubSubTopic = "UPDATED" // The channel ID for observing pull request updates. + PullRequestPubSubTopicMarkasread PullRequestPubSubTopic = "MARKASREAD" // The channel ID for marking an pull request as read. + PullRequestPubSubTopicHeadRef PullRequestPubSubTopic = "HEAD_REF" // The channel ID for observing head ref updates. + PullRequestPubSubTopicTimeline PullRequestPubSubTopic = "TIMELINE" // The channel ID for updating items on the pull request timeline. + PullRequestPubSubTopicState PullRequestPubSubTopic = "STATE" // The channel ID for observing pull request state updates. +) + +// PullRequestReviewCommentState represents the possible states of a pull request review comment. +type PullRequestReviewCommentState string + +// The possible states of a pull request review comment. +const ( + PullRequestReviewCommentStatePending PullRequestReviewCommentState = "PENDING" // A comment that is part of a pending review. 
+ PullRequestReviewCommentStateSubmitted PullRequestReviewCommentState = "SUBMITTED" // A comment that is part of a submitted review.
+)
+
+// PullRequestReviewEvent represents the possible events to perform on a pull request review.
+type PullRequestReviewEvent string
+
+// The possible events to perform on a pull request review.
+const (
+ PullRequestReviewEventComment PullRequestReviewEvent = "COMMENT" // Submit general feedback without explicit approval.
+ PullRequestReviewEventApprove PullRequestReviewEvent = "APPROVE" // Submit feedback and approve merging these changes.
+ PullRequestReviewEventRequestChanges PullRequestReviewEvent = "REQUEST_CHANGES" // Submit feedback that must be addressed before merging.
+ PullRequestReviewEventDismiss PullRequestReviewEvent = "DISMISS" // Dismiss review so it no longer affects merging.
+)
+
+// PullRequestReviewState represents the possible states of a pull request review.
+type PullRequestReviewState string
+
+// The possible states of a pull request review.
+const (
+ PullRequestReviewStatePending PullRequestReviewState = "PENDING" // A review that has not yet been submitted.
+ PullRequestReviewStateCommented PullRequestReviewState = "COMMENTED" // An informational review.
+ PullRequestReviewStateApproved PullRequestReviewState = "APPROVED" // A review allowing the pull request to merge.
+ PullRequestReviewStateChangesRequested PullRequestReviewState = "CHANGES_REQUESTED" // A review blocking the pull request from merging.
+ PullRequestReviewStateDismissed PullRequestReviewState = "DISMISSED" // A review that has been dismissed.
+)
+
+// PullRequestState represents the possible states of a pull request.
+type PullRequestState string
+
+// The possible states of a pull request.
+const (
+ PullRequestStateOpen PullRequestState = "OPEN" // A pull request that is still open.
+ PullRequestStateClosed PullRequestState = "CLOSED" // A pull request that has been closed without being merged.
+ PullRequestStateMerged PullRequestState = "MERGED" // A pull request that has been closed by being merged.
+)
+
+// PullRequestTimelineItemsItemType represents the possible item types found in a timeline.
+type PullRequestTimelineItemsItemType string
+
+// The possible item types found in a timeline.
+const (
+ PullRequestTimelineItemsItemTypePullRequestCommit PullRequestTimelineItemsItemType = "PULL_REQUEST_COMMIT" // Represents a Git commit part of a pull request.
+ PullRequestTimelineItemsItemTypePullRequestCommitCommentThread PullRequestTimelineItemsItemType = "PULL_REQUEST_COMMIT_COMMENT_THREAD" // Represents a commit comment thread part of a pull request.
+ PullRequestTimelineItemsItemTypePullRequestReview PullRequestTimelineItemsItemType = "PULL_REQUEST_REVIEW" // A review object for a given pull request.
+ PullRequestTimelineItemsItemTypePullRequestReviewThread PullRequestTimelineItemsItemType = "PULL_REQUEST_REVIEW_THREAD" // A threaded list of comments for a given pull request.
+ PullRequestTimelineItemsItemTypePullRequestRevisionMarker PullRequestTimelineItemsItemType = "PULL_REQUEST_REVISION_MARKER" // Represents the latest point in the pull request timeline for which the viewer has seen the pull request's commits.
+ PullRequestTimelineItemsItemTypeBaseRefChangedEvent PullRequestTimelineItemsItemType = "BASE_REF_CHANGED_EVENT" // Represents a 'base_ref_changed' event on a given issue or pull request.
+ PullRequestTimelineItemsItemTypeBaseRefForcePushedEvent PullRequestTimelineItemsItemType = "BASE_REF_FORCE_PUSHED_EVENT" // Represents a 'base_ref_force_pushed' event on a given pull request.
+ PullRequestTimelineItemsItemTypeDeployedEvent PullRequestTimelineItemsItemType = "DEPLOYED_EVENT" // Represents a 'deployed' event on a given pull request.
+ PullRequestTimelineItemsItemTypeDeploymentEnvironmentChangedEvent PullRequestTimelineItemsItemType = "DEPLOYMENT_ENVIRONMENT_CHANGED_EVENT" // Represents a 'deployment_environment_changed' event on a given pull request.
+ PullRequestTimelineItemsItemTypeHeadRefDeletedEvent PullRequestTimelineItemsItemType = "HEAD_REF_DELETED_EVENT" // Represents a 'head_ref_deleted' event on a given pull request.
+ PullRequestTimelineItemsItemTypeHeadRefForcePushedEvent PullRequestTimelineItemsItemType = "HEAD_REF_FORCE_PUSHED_EVENT" // Represents a 'head_ref_force_pushed' event on a given pull request.
+ PullRequestTimelineItemsItemTypeHeadRefRestoredEvent PullRequestTimelineItemsItemType = "HEAD_REF_RESTORED_EVENT" // Represents a 'head_ref_restored' event on a given pull request.
+ PullRequestTimelineItemsItemTypeMergedEvent PullRequestTimelineItemsItemType = "MERGED_EVENT" // Represents a 'merged' event on a given pull request.
+ PullRequestTimelineItemsItemTypeReviewDismissedEvent PullRequestTimelineItemsItemType = "REVIEW_DISMISSED_EVENT" // Represents a 'review_dismissed' event on a given issue or pull request.
+ PullRequestTimelineItemsItemTypeReviewRequestedEvent PullRequestTimelineItemsItemType = "REVIEW_REQUESTED_EVENT" // Represents a 'review_requested' event on a given pull request.
+ PullRequestTimelineItemsItemTypeReviewRequestRemovedEvent PullRequestTimelineItemsItemType = "REVIEW_REQUEST_REMOVED_EVENT" // Represents a 'review_request_removed' event on a given pull request.
+ PullRequestTimelineItemsItemTypeReadyForReviewEvent PullRequestTimelineItemsItemType = "READY_FOR_REVIEW_EVENT" // Represents a 'ready_for_review' event on a given pull request.
+ PullRequestTimelineItemsItemTypeIssueComment PullRequestTimelineItemsItemType = "ISSUE_COMMENT" // Represents a comment on an Issue.
+ PullRequestTimelineItemsItemTypeCrossReferencedEvent PullRequestTimelineItemsItemType = "CROSS_REFERENCED_EVENT" // Represents a mention made by one issue or pull request to another.
+ PullRequestTimelineItemsItemTypeAddedToProjectEvent PullRequestTimelineItemsItemType = "ADDED_TO_PROJECT_EVENT" // Represents an 'added_to_project' event on a given issue or pull request.
+ PullRequestTimelineItemsItemTypeAssignedEvent PullRequestTimelineItemsItemType = "ASSIGNED_EVENT" // Represents an 'assigned' event on any assignable object.
+ PullRequestTimelineItemsItemTypeClosedEvent PullRequestTimelineItemsItemType = "CLOSED_EVENT" // Represents a 'closed' event on any `Closable`.
+ PullRequestTimelineItemsItemTypeCommentDeletedEvent PullRequestTimelineItemsItemType = "COMMENT_DELETED_EVENT" // Represents a 'comment_deleted' event on a given issue or pull request.
+ PullRequestTimelineItemsItemTypeConvertedNoteToIssueEvent PullRequestTimelineItemsItemType = "CONVERTED_NOTE_TO_ISSUE_EVENT" // Represents a 'converted_note_to_issue' event on a given issue or pull request.
+ PullRequestTimelineItemsItemTypeDemilestonedEvent PullRequestTimelineItemsItemType = "DEMILESTONED_EVENT" // Represents a 'demilestoned' event on a given issue or pull request.
+ PullRequestTimelineItemsItemTypeLabeledEvent PullRequestTimelineItemsItemType = "LABELED_EVENT" // Represents a 'labeled' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeLockedEvent PullRequestTimelineItemsItemType = "LOCKED_EVENT" // Represents a 'locked' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeMarkedAsDuplicateEvent PullRequestTimelineItemsItemType = "MARKED_AS_DUPLICATE_EVENT" // Represents a 'marked_as_duplicate' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeMentionedEvent PullRequestTimelineItemsItemType = "MENTIONED_EVENT" // Represents a 'mentioned' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeMilestonedEvent PullRequestTimelineItemsItemType = "MILESTONED_EVENT" // Represents a 'milestoned' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeMovedColumnsInProjectEvent PullRequestTimelineItemsItemType = "MOVED_COLUMNS_IN_PROJECT_EVENT" // Represents a 'moved_columns_in_project' event on a given issue or pull request. + PullRequestTimelineItemsItemTypePinnedEvent PullRequestTimelineItemsItemType = "PINNED_EVENT" // Represents a 'pinned' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeReferencedEvent PullRequestTimelineItemsItemType = "REFERENCED_EVENT" // Represents a 'referenced' event on a given `ReferencedSubject`. + PullRequestTimelineItemsItemTypeRemovedFromProjectEvent PullRequestTimelineItemsItemType = "REMOVED_FROM_PROJECT_EVENT" // Represents a 'removed_from_project' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeRenamedTitleEvent PullRequestTimelineItemsItemType = "RENAMED_TITLE_EVENT" // Represents a 'renamed' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeReopenedEvent PullRequestTimelineItemsItemType = "REOPENED_EVENT" // Represents a 'reopened' event on any `Closable`. + PullRequestTimelineItemsItemTypeSubscribedEvent PullRequestTimelineItemsItemType = "SUBSCRIBED_EVENT" // Represents a 'subscribed' event on a given `Subscribable`. + PullRequestTimelineItemsItemTypeTransferredEvent PullRequestTimelineItemsItemType = "TRANSFERRED_EVENT" // Represents a 'transferred' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeUnassignedEvent PullRequestTimelineItemsItemType = "UNASSIGNED_EVENT" // Represents an 'unassigned' event on any assignable object. + PullRequestTimelineItemsItemTypeUnlabeledEvent PullRequestTimelineItemsItemType = "UNLABELED_EVENT" // Represents an 'unlabeled' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeUnlockedEvent PullRequestTimelineItemsItemType = "UNLOCKED_EVENT" // Represents an 'unlocked' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeUserBlockedEvent PullRequestTimelineItemsItemType = "USER_BLOCKED_EVENT" // Represents a 'user_blocked' event on a given user. + PullRequestTimelineItemsItemTypeUnpinnedEvent PullRequestTimelineItemsItemType = "UNPINNED_EVENT" // Represents an 'unpinned' event on a given issue or pull request. + PullRequestTimelineItemsItemTypeUnsubscribedEvent PullRequestTimelineItemsItemType = "UNSUBSCRIBED_EVENT" // Represents an 'unsubscribed' event on a given `Subscribable`. +) + +// PullRequestUpdateState represents the possible target states when updating a pull request. +type PullRequestUpdateState string + +// The possible target states when updating a pull request. 
+const ( + PullRequestUpdateStateOpen PullRequestUpdateState = "OPEN" // A pull request that is still open. + PullRequestUpdateStateClosed PullRequestUpdateState = "CLOSED" // A pull request that has been closed without being merged. +) + +// ReactionContent represents emojis that can be attached to Issues, Pull Requests and Comments. +type ReactionContent string + +// Emojis that can be attached to Issues, Pull Requests and Comments. +const ( + ReactionContentThumbsUp ReactionContent = "THUMBS_UP" // Represents the `:+1:` emoji. + ReactionContentThumbsDown ReactionContent = "THUMBS_DOWN" // Represents the `:-1:` emoji. + ReactionContentLaugh ReactionContent = "LAUGH" // Represents the `:laugh:` emoji. + ReactionContentHooray ReactionContent = "HOORAY" // Represents the `:hooray:` emoji. + ReactionContentConfused ReactionContent = "CONFUSED" // Represents the `:confused:` emoji. + ReactionContentHeart ReactionContent = "HEART" // Represents the `:heart:` emoji. + ReactionContentRocket ReactionContent = "ROCKET" // Represents the `:rocket:` emoji. + ReactionContentEyes ReactionContent = "EYES" // Represents the `:eyes:` emoji. +) + +// ReactionOrderField represents a list of fields that reactions can be ordered by. +type ReactionOrderField string + +// A list of fields that reactions can be ordered by. +const ( + ReactionOrderFieldCreatedAt ReactionOrderField = "CREATED_AT" // Allows ordering a list of reactions by when they were created. +) + +// RefOrderField represents properties by which ref connections can be ordered. +type RefOrderField string + +// Properties by which ref connections can be ordered. +const ( + RefOrderFieldTagCommitDate RefOrderField = "TAG_COMMIT_DATE" // Order refs by underlying commit date if the ref prefix is refs/tags/. + RefOrderFieldAlphabetical RefOrderField = "ALPHABETICAL" // Order refs by their alphanumeric name. +) + +// RegistryPackageDependencyType represents the possible types of a registry package dependency. +type RegistryPackageDependencyType string + +// The possible types of a registry package dependency. +const ( + RegistryPackageDependencyTypeDefault RegistryPackageDependencyType = "DEFAULT" // A default registry package dependency type. + RegistryPackageDependencyTypeDev RegistryPackageDependencyType = "DEV" // A dev registry package dependency type. + RegistryPackageDependencyTypeTest RegistryPackageDependencyType = "TEST" // A test registry package dependency type. + RegistryPackageDependencyTypePeer RegistryPackageDependencyType = "PEER" // A peer registry package dependency type. + RegistryPackageDependencyTypeOptional RegistryPackageDependencyType = "OPTIONAL" // An optional registry package dependency type. + RegistryPackageDependencyTypeBundled RegistryPackageDependencyType = "BUNDLED" // An optional registry package dependency type. +) + +// RegistryPackageFileState represents the possible states of a registry package file. +type RegistryPackageFileState string + +// The possible states of a registry package file. +const ( + RegistryPackageFileStateNew RegistryPackageFileState = "NEW" // Package file doesn't have a blob backing it. + RegistryPackageFileStateUploaded RegistryPackageFileState = "UPLOADED" // All Package file contents have been uploaded. +) + +// RegistryPackageType represents the possible types of a registry package. +type RegistryPackageType string + +// The possible types of a registry package. +const ( + RegistryPackageTypeNpm RegistryPackageType = "NPM" // An npm registry package. 
+ RegistryPackageTypeRubygems RegistryPackageType = "RUBYGEMS" // A rubygems registry package. + RegistryPackageTypeMaven RegistryPackageType = "MAVEN" // A maven registry package. + RegistryPackageTypeDocker RegistryPackageType = "DOCKER" // A docker image. + RegistryPackageTypeDebian RegistryPackageType = "DEBIAN" // A debian package. + RegistryPackageTypeNuget RegistryPackageType = "NUGET" // A nuget package. + RegistryPackageTypePython RegistryPackageType = "PYTHON" // A python package. +) + +// ReleaseOrderField represents properties by which release connections can be ordered. +type ReleaseOrderField string + +// Properties by which release connections can be ordered. +const ( + ReleaseOrderFieldCreatedAt ReleaseOrderField = "CREATED_AT" // Order releases by creation time. + ReleaseOrderFieldName ReleaseOrderField = "NAME" // Order releases alphabetically by name. +) + +// RepoAccessAuditEntryVisibility represents the privacy of a repository. +type RepoAccessAuditEntryVisibility string + +// The privacy of a repository. +const ( + RepoAccessAuditEntryVisibilityInternal RepoAccessAuditEntryVisibility = "INTERNAL" // The repository is visible only to users in the same business. + RepoAccessAuditEntryVisibilityPrivate RepoAccessAuditEntryVisibility = "PRIVATE" // The repository is visible only to those with explicit access. + RepoAccessAuditEntryVisibilityPublic RepoAccessAuditEntryVisibility = "PUBLIC" // The repository is visible to everyone. +) + +// RepoAddMemberAuditEntryVisibility represents the privacy of a repository. +type RepoAddMemberAuditEntryVisibility string + +// The privacy of a repository. +const ( + RepoAddMemberAuditEntryVisibilityInternal RepoAddMemberAuditEntryVisibility = "INTERNAL" // The repository is visible only to users in the same business. + RepoAddMemberAuditEntryVisibilityPrivate RepoAddMemberAuditEntryVisibility = "PRIVATE" // The repository is visible only to those with explicit access. + RepoAddMemberAuditEntryVisibilityPublic RepoAddMemberAuditEntryVisibility = "PUBLIC" // The repository is visible to everyone. +) + +// RepoArchivedAuditEntryVisibility represents the privacy of a repository. +type RepoArchivedAuditEntryVisibility string + +// The privacy of a repository. +const ( + RepoArchivedAuditEntryVisibilityInternal RepoArchivedAuditEntryVisibility = "INTERNAL" // The repository is visible only to users in the same business. + RepoArchivedAuditEntryVisibilityPrivate RepoArchivedAuditEntryVisibility = "PRIVATE" // The repository is visible only to those with explicit access. + RepoArchivedAuditEntryVisibilityPublic RepoArchivedAuditEntryVisibility = "PUBLIC" // The repository is visible to everyone. +) + +// RepoChangeMergeSettingAuditEntryMergeType represents the merge options available for pull requests to this repository. +type RepoChangeMergeSettingAuditEntryMergeType string + +// The merge options available for pull requests to this repository. +const ( + RepoChangeMergeSettingAuditEntryMergeTypeMerge RepoChangeMergeSettingAuditEntryMergeType = "MERGE" // The pull request is added to the base branch in a merge commit. + RepoChangeMergeSettingAuditEntryMergeTypeRebase RepoChangeMergeSettingAuditEntryMergeType = "REBASE" // Commits from the pull request are added onto the base branch individually without a merge commit. + RepoChangeMergeSettingAuditEntryMergeTypeSquash RepoChangeMergeSettingAuditEntryMergeType = "SQUASH" // The pull request's commits are squashed into a single commit before they are merged to the base branch. 
+) + +// RepoCreateAuditEntryVisibility represents the privacy of a repository. +type RepoCreateAuditEntryVisibility string + +// The privacy of a repository. +const ( + RepoCreateAuditEntryVisibilityInternal RepoCreateAuditEntryVisibility = "INTERNAL" // The repository is visible only to users in the same business. + RepoCreateAuditEntryVisibilityPrivate RepoCreateAuditEntryVisibility = "PRIVATE" // The repository is visible only to those with explicit access. + RepoCreateAuditEntryVisibilityPublic RepoCreateAuditEntryVisibility = "PUBLIC" // The repository is visible to everyone. +) + +// RepoDestroyAuditEntryVisibility represents the privacy of a repository. +type RepoDestroyAuditEntryVisibility string + +// The privacy of a repository. +const ( + RepoDestroyAuditEntryVisibilityInternal RepoDestroyAuditEntryVisibility = "INTERNAL" // The repository is visible only to users in the same business. + RepoDestroyAuditEntryVisibilityPrivate RepoDestroyAuditEntryVisibility = "PRIVATE" // The repository is visible only to those with explicit access. + RepoDestroyAuditEntryVisibilityPublic RepoDestroyAuditEntryVisibility = "PUBLIC" // The repository is visible to everyone. +) + +// RepoRemoveMemberAuditEntryVisibility represents the privacy of a repository. +type RepoRemoveMemberAuditEntryVisibility string + +// The privacy of a repository. +const ( + RepoRemoveMemberAuditEntryVisibilityInternal RepoRemoveMemberAuditEntryVisibility = "INTERNAL" // The repository is visible only to users in the same business. + RepoRemoveMemberAuditEntryVisibilityPrivate RepoRemoveMemberAuditEntryVisibility = "PRIVATE" // The repository is visible only to those with explicit access. + RepoRemoveMemberAuditEntryVisibilityPublic RepoRemoveMemberAuditEntryVisibility = "PUBLIC" // The repository is visible to everyone. +) + +// ReportedContentClassifiers represents the reasons a piece of content can be reported or minimized. +type ReportedContentClassifiers string + +// The reasons a piece of content can be reported or minimized. +const ( + ReportedContentClassifiersSpam ReportedContentClassifiers = "SPAM" // A spammy piece of content. + ReportedContentClassifiersAbuse ReportedContentClassifiers = "ABUSE" // An abusive or harassing piece of content. + ReportedContentClassifiersOffTopic ReportedContentClassifiers = "OFF_TOPIC" // An irrelevant piece of content. + ReportedContentClassifiersOutdated ReportedContentClassifiers = "OUTDATED" // An outdated piece of content. + ReportedContentClassifiersResolved ReportedContentClassifiers = "RESOLVED" // The content has been resolved. +) + +// RepositoryAffiliation represents the affiliation of a user to a repository. +type RepositoryAffiliation string + +// The affiliation of a user to a repository. +const ( + RepositoryAffiliationOwner RepositoryAffiliation = "OWNER" // Repositories that are owned by the authenticated user. + RepositoryAffiliationCollaborator RepositoryAffiliation = "COLLABORATOR" // Repositories that the user has been added to as a collaborator. + RepositoryAffiliationOrganizationMember RepositoryAffiliation = "ORGANIZATION_MEMBER" // Repositories that the user has access to through being a member of an organization. This includes every repository on every team that the user is on. +) + +// RepositoryCollaboratorAffiliation represents the affiliation type between collaborator and repository. +type RepositoryCollaboratorAffiliation string + +// The affiliation type between collaborator and repository. 
+const ( + RepositoryCollaboratorAffiliationAll RepositoryCollaboratorAffiliation = "ALL" // All collaborators of the repository. + RepositoryCollaboratorAffiliationOutside RepositoryCollaboratorAffiliation = "OUTSIDE" // All outside collaborators of an organization-owned repository. +) + +// RepositoryContributionType represents the reason a repository is listed as 'contributed'. +type RepositoryContributionType string + +// The reason a repository is listed as 'contributed'. +const ( + RepositoryContributionTypeCommit RepositoryContributionType = "COMMIT" // Created a commit. + RepositoryContributionTypeIssue RepositoryContributionType = "ISSUE" // Created an issue. + RepositoryContributionTypePullRequest RepositoryContributionType = "PULL_REQUEST" // Created a pull request. + RepositoryContributionTypeRepository RepositoryContributionType = "REPOSITORY" // Created the repository. + RepositoryContributionTypePullRequestReview RepositoryContributionType = "PULL_REQUEST_REVIEW" // Reviewed a pull request. +) + +// RepositoryInvitationOrderField represents properties by which repository invitation connections can be ordered. +type RepositoryInvitationOrderField string + +// Properties by which repository invitation connections can be ordered. +const ( + RepositoryInvitationOrderFieldCreatedAt RepositoryInvitationOrderField = "CREATED_AT" // Order repository invitations by creation time. + RepositoryInvitationOrderFieldInviteeLogin RepositoryInvitationOrderField = "INVITEE_LOGIN" // Order repository invitations by invitee login. +) + +// RepositoryLockReason represents the possible reasons a given repository could be in a locked state. +type RepositoryLockReason string + +// The possible reasons a given repository could be in a locked state. +const ( + RepositoryLockReasonMoving RepositoryLockReason = "MOVING" // The repository is locked due to a move. + RepositoryLockReasonBilling RepositoryLockReason = "BILLING" // The repository is locked due to a billing related reason. + RepositoryLockReasonRename RepositoryLockReason = "RENAME" // The repository is locked due to a rename. + RepositoryLockReasonMigrating RepositoryLockReason = "MIGRATING" // The repository is locked due to a migration. +) + +// RepositoryOrderField represents properties by which repository connections can be ordered. +type RepositoryOrderField string + +// Properties by which repository connections can be ordered. +const ( + RepositoryOrderFieldCreatedAt RepositoryOrderField = "CREATED_AT" // Order repositories by creation time. + RepositoryOrderFieldUpdatedAt RepositoryOrderField = "UPDATED_AT" // Order repositories by update time. + RepositoryOrderFieldPushedAt RepositoryOrderField = "PUSHED_AT" // Order repositories by push time. + RepositoryOrderFieldName RepositoryOrderField = "NAME" // Order repositories by name. + RepositoryOrderFieldStargazers RepositoryOrderField = "STARGAZERS" // Order repositories by number of stargazers. +) + +// RepositoryPermission represents the access level to a repository. +type RepositoryPermission string + +// The access level to a repository. +const ( + RepositoryPermissionAdmin RepositoryPermission = "ADMIN" // Can read, clone, and push to this repository. Can also manage issues, pull requests, and repository settings, including adding collaborators. + RepositoryPermissionMaintain RepositoryPermission = "MAINTAIN" // Can read, clone, and push to this repository. They can also manage issues, pull requests, and some repository settings. 
+ RepositoryPermissionWrite RepositoryPermission = "WRITE" // Can read, clone, and push to this repository. Can also manage issues and pull requests. + RepositoryPermissionTriage RepositoryPermission = "TRIAGE" // Can read and clone this repository. Can also manage issues and pull requests. + RepositoryPermissionRead RepositoryPermission = "READ" // Can read and clone this repository. Can also open and comment on issues and pull requests. +) + +// RepositoryPrivacy represents the privacy of a repository. +type RepositoryPrivacy string + +// The privacy of a repository. +const ( + RepositoryPrivacyPublic RepositoryPrivacy = "PUBLIC" // Public. + RepositoryPrivacyPrivate RepositoryPrivacy = "PRIVATE" // Private. +) + +// RepositoryVisibility represents the repository's visibility level. +type RepositoryVisibility string + +// The repository's visibility level. +const ( + RepositoryVisibilityPrivate RepositoryVisibility = "PRIVATE" // The repository is visible only to those with explicit access. + RepositoryVisibilityPublic RepositoryVisibility = "PUBLIC" // The repository is visible to everyone. + RepositoryVisibilityInternal RepositoryVisibility = "INTERNAL" // The repository is visible only to users in the same business. +) + +// SamlDigestAlgorithm represents the possible digest algorithms used to sign SAML requests for an identity provider. +type SamlDigestAlgorithm string + +// The possible digest algorithms used to sign SAML requests for an identity provider. +const ( + SamlDigestAlgorithmSha1 SamlDigestAlgorithm = "SHA1" // SHA1. + SamlDigestAlgorithmSha256 SamlDigestAlgorithm = "SHA256" // SHA256. + SamlDigestAlgorithmSha384 SamlDigestAlgorithm = "SHA384" // SHA384. + SamlDigestAlgorithmSha512 SamlDigestAlgorithm = "SHA512" // SHA512. +) + +// SamlSignatureAlgorithm represents the possible signature algorithms used to sign SAML requests for a Identity Provider. +type SamlSignatureAlgorithm string + +// The possible signature algorithms used to sign SAML requests for a Identity Provider. +const ( + SamlSignatureAlgorithmRsaSha1 SamlSignatureAlgorithm = "RSA_SHA1" // RSA-SHA1. + SamlSignatureAlgorithmRsaSha256 SamlSignatureAlgorithm = "RSA_SHA256" // RSA-SHA256. + SamlSignatureAlgorithmRsaSha384 SamlSignatureAlgorithm = "RSA_SHA384" // RSA-SHA384. + SamlSignatureAlgorithmRsaSha512 SamlSignatureAlgorithm = "RSA_SHA512" // RSA-SHA512. +) + +// SavedReplyOrderField represents properties by which saved reply connections can be ordered. +type SavedReplyOrderField string + +// Properties by which saved reply connections can be ordered. +const ( + SavedReplyOrderFieldUpdatedAt SavedReplyOrderField = "UPDATED_AT" // Order saved reply by when they were updated. +) + +// SearchType represents represents the individual results of a search. +type SearchType string + +// Represents the individual results of a search. +const ( + SearchTypeIssue SearchType = "ISSUE" // Returns results matching issues in repositories. + SearchTypeRepository SearchType = "REPOSITORY" // Returns results matching repositories. + SearchTypeUser SearchType = "USER" // Returns results matching users and organizations on GitHub. +) + +// SecurityAdvisoryEcosystem represents the possible ecosystems of a security vulnerability's package. +type SecurityAdvisoryEcosystem string + +// The possible ecosystems of a security vulnerability's package. +const ( + SecurityAdvisoryEcosystemRubygems SecurityAdvisoryEcosystem = "RUBYGEMS" // Ruby gems hosted at RubyGems.org. 
+ SecurityAdvisoryEcosystemNpm SecurityAdvisoryEcosystem = "NPM" // JavaScript packages hosted at npmjs.com. + SecurityAdvisoryEcosystemPip SecurityAdvisoryEcosystem = "PIP" // Python packages hosted at PyPI.org. + SecurityAdvisoryEcosystemMaven SecurityAdvisoryEcosystem = "MAVEN" // Java artifacts hosted at the Maven central repository. + SecurityAdvisoryEcosystemNuget SecurityAdvisoryEcosystem = "NUGET" // .NET packages hosted at the NuGet Gallery. + SecurityAdvisoryEcosystemComposer SecurityAdvisoryEcosystem = "COMPOSER" // PHP packages hosted at packagist.org. +) + +// SecurityAdvisoryIdentifierType represents identifier formats available for advisories. +type SecurityAdvisoryIdentifierType string + +// Identifier formats available for advisories. +const ( + SecurityAdvisoryIdentifierTypeCve SecurityAdvisoryIdentifierType = "CVE" // Common Vulnerabilities and Exposures Identifier. + SecurityAdvisoryIdentifierTypeGhsa SecurityAdvisoryIdentifierType = "GHSA" // GitHub Security Advisory ID. +) + +// SecurityAdvisoryOrderField represents properties by which security advisory connections can be ordered. +type SecurityAdvisoryOrderField string + +// Properties by which security advisory connections can be ordered. +const ( + SecurityAdvisoryOrderFieldPublishedAt SecurityAdvisoryOrderField = "PUBLISHED_AT" // Order advisories by publication time. + SecurityAdvisoryOrderFieldUpdatedAt SecurityAdvisoryOrderField = "UPDATED_AT" // Order advisories by update time. +) + +// SecurityAdvisorySeverity represents severity of the vulnerability. +type SecurityAdvisorySeverity string + +// Severity of the vulnerability. +const ( + SecurityAdvisorySeverityLow SecurityAdvisorySeverity = "LOW" // Low. + SecurityAdvisorySeverityModerate SecurityAdvisorySeverity = "MODERATE" // Moderate. + SecurityAdvisorySeverityHigh SecurityAdvisorySeverity = "HIGH" // High. + SecurityAdvisorySeverityCritical SecurityAdvisorySeverity = "CRITICAL" // Critical. +) + +// SecurityVulnerabilityOrderField represents properties by which security vulnerability connections can be ordered. +type SecurityVulnerabilityOrderField string + +// Properties by which security vulnerability connections can be ordered. +const ( + SecurityVulnerabilityOrderFieldUpdatedAt SecurityVulnerabilityOrderField = "UPDATED_AT" // Order vulnerability by update time. +) + +// SponsorsTierOrderField represents properties by which Sponsors tiers connections can be ordered. +type SponsorsTierOrderField string + +// Properties by which Sponsors tiers connections can be ordered. +const ( + SponsorsTierOrderFieldCreatedAt SponsorsTierOrderField = "CREATED_AT" // Order tiers by creation time. + SponsorsTierOrderFieldMonthlyPriceInCents SponsorsTierOrderField = "MONTHLY_PRICE_IN_CENTS" // Order tiers by their monthly price in cents. +) + +// SponsorshipOrderField represents properties by which sponsorship connections can be ordered. +type SponsorshipOrderField string + +// Properties by which sponsorship connections can be ordered. +const ( + SponsorshipOrderFieldCreatedAt SponsorshipOrderField = "CREATED_AT" // Order sponsorship by creation time. +) + +// SponsorshipPrivacy represents the privacy of a sponsorship. +type SponsorshipPrivacy string + +// The privacy of a sponsorship. +const ( + SponsorshipPrivacyPublic SponsorshipPrivacy = "PUBLIC" // Public. + SponsorshipPrivacyPrivate SponsorshipPrivacy = "PRIVATE" // Private. +) + +// StarOrderField represents properties by which star connections can be ordered. 
+type StarOrderField string + +// Properties by which star connections can be ordered. +const ( + StarOrderFieldStarredAt StarOrderField = "STARRED_AT" // Allows ordering a list of stars by when they were created. +) + +// StatusState represents the possible commit status states. +type StatusState string + +// The possible commit status states. +const ( + StatusStateExpected StatusState = "EXPECTED" // Status is expected. + StatusStateError StatusState = "ERROR" // Status is errored. + StatusStateFailure StatusState = "FAILURE" // Status is failing. + StatusStatePending StatusState = "PENDING" // Status is pending. + StatusStateSuccess StatusState = "SUCCESS" // Status is successful. +) + +// SubscriptionState represents the possible states of a subscription. +type SubscriptionState string + +// The possible states of a subscription. +const ( + SubscriptionStateUnsubscribed SubscriptionState = "UNSUBSCRIBED" // The User is only notified when participating or @mentioned. + SubscriptionStateSubscribed SubscriptionState = "SUBSCRIBED" // The User is notified of all conversations. + SubscriptionStateIgnored SubscriptionState = "IGNORED" // The User is never notified. +) + +// TeamDiscussionCommentOrderField represents properties by which team discussion comment connections can be ordered. +type TeamDiscussionCommentOrderField string + +// Properties by which team discussion comment connections can be ordered. +const ( + TeamDiscussionCommentOrderFieldNumber TeamDiscussionCommentOrderField = "NUMBER" // Allows sequential ordering of team discussion comments (which is equivalent to chronological ordering). +) + +// TeamDiscussionOrderField represents properties by which team discussion connections can be ordered. +type TeamDiscussionOrderField string + +// Properties by which team discussion connections can be ordered. +const ( + TeamDiscussionOrderFieldCreatedAt TeamDiscussionOrderField = "CREATED_AT" // Allows chronological ordering of team discussions. +) + +// TeamMemberOrderField represents properties by which team member connections can be ordered. +type TeamMemberOrderField string + +// Properties by which team member connections can be ordered. +const ( + TeamMemberOrderFieldLogin TeamMemberOrderField = "LOGIN" // Order team members by login. + TeamMemberOrderFieldCreatedAt TeamMemberOrderField = "CREATED_AT" // Order team members by creation time. +) + +// TeamMemberRole represents the possible team member roles; either 'maintainer' or 'member'. +type TeamMemberRole string + +// The possible team member roles; either 'maintainer' or 'member'. +const ( + TeamMemberRoleMaintainer TeamMemberRole = "MAINTAINER" // A team maintainer has permission to add and remove team members. + TeamMemberRoleMember TeamMemberRole = "MEMBER" // A team member has no administrative permissions on the team. +) + +// TeamMembershipType represents defines which types of team members are included in the returned list. Can be one of IMMEDIATE, CHILD_TEAM or ALL. +type TeamMembershipType string + +// Defines which types of team members are included in the returned list. Can be one of IMMEDIATE, CHILD_TEAM or ALL. +const ( + TeamMembershipTypeImmediate TeamMembershipType = "IMMEDIATE" // Includes only immediate members of the team. + TeamMembershipTypeChildTeam TeamMembershipType = "CHILD_TEAM" // Includes only child team members for the team. + TeamMembershipTypeAll TeamMembershipType = "ALL" // Includes immediate and child team members for the team. 
+) + +// TeamOrderField represents properties by which team connections can be ordered. +type TeamOrderField string + +// Properties by which team connections can be ordered. +const ( + TeamOrderFieldName TeamOrderField = "NAME" // Allows ordering a list of teams by name. +) + +// TeamPrivacy represents the possible team privacy values. +type TeamPrivacy string + +// The possible team privacy values. +const ( + TeamPrivacySecret TeamPrivacy = "SECRET" // A secret team can only be seen by its members. + TeamPrivacyVisible TeamPrivacy = "VISIBLE" // A visible team can be seen and @mentioned by every member of the organization. +) + +// TeamRepositoryOrderField represents properties by which team repository connections can be ordered. +type TeamRepositoryOrderField string + +// Properties by which team repository connections can be ordered. +const ( + TeamRepositoryOrderFieldCreatedAt TeamRepositoryOrderField = "CREATED_AT" // Order repositories by creation time. + TeamRepositoryOrderFieldUpdatedAt TeamRepositoryOrderField = "UPDATED_AT" // Order repositories by update time. + TeamRepositoryOrderFieldPushedAt TeamRepositoryOrderField = "PUSHED_AT" // Order repositories by push time. + TeamRepositoryOrderFieldName TeamRepositoryOrderField = "NAME" // Order repositories by name. + TeamRepositoryOrderFieldPermission TeamRepositoryOrderField = "PERMISSION" // Order repositories by permission. + TeamRepositoryOrderFieldStargazers TeamRepositoryOrderField = "STARGAZERS" // Order repositories by number of stargazers. +) + +// TeamRole represents the role of a user on a team. +type TeamRole string + +// The role of a user on a team. +const ( + TeamRoleAdmin TeamRole = "ADMIN" // User has admin rights on the team. + TeamRoleMember TeamRole = "MEMBER" // User is a member of the team. +) + +// TopicSuggestionDeclineReason represents reason that the suggested topic is declined. +type TopicSuggestionDeclineReason string + +// Reason that the suggested topic is declined. +const ( + TopicSuggestionDeclineReasonNotRelevant TopicSuggestionDeclineReason = "NOT_RELEVANT" // The suggested topic is not relevant to the repository. + TopicSuggestionDeclineReasonTooSpecific TopicSuggestionDeclineReason = "TOO_SPECIFIC" // The suggested topic is too specific for the repository (e.g. #ruby-on-rails-version-4-2-1). + TopicSuggestionDeclineReasonPersonalPreference TopicSuggestionDeclineReason = "PERSONAL_PREFERENCE" // The viewer does not like the suggested topic. + TopicSuggestionDeclineReasonTooGeneral TopicSuggestionDeclineReason = "TOO_GENERAL" // The suggested topic is too general for the repository. +) + +// UserBlockDuration represents the possible durations that a user can be blocked for. +type UserBlockDuration string + +// The possible durations that a user can be blocked for. +const ( + UserBlockDurationOneDay UserBlockDuration = "ONE_DAY" // The user was blocked for 1 day. + UserBlockDurationThreeDays UserBlockDuration = "THREE_DAYS" // The user was blocked for 3 days. + UserBlockDurationOneWeek UserBlockDuration = "ONE_WEEK" // The user was blocked for 7 days. + UserBlockDurationOneMonth UserBlockDuration = "ONE_MONTH" // The user was blocked for 30 days. + UserBlockDurationPermanent UserBlockDuration = "PERMANENT" // The user was blocked permanently. +) + +// UserStatusOrderField represents properties by which user status connections can be ordered. +type UserStatusOrderField string + +// Properties by which user status connections can be ordered. 
+const ( + UserStatusOrderFieldUpdatedAt UserStatusOrderField = "UPDATED_AT" // Order user statuses by when they were updated. +) diff --git a/vendor/github.com/shurcooL/githubv4/githubv4.go b/vendor/github.com/shurcooL/githubv4/githubv4.go new file mode 100644 index 0000000000..3a544bf2b0 --- /dev/null +++ b/vendor/github.com/shurcooL/githubv4/githubv4.go @@ -0,0 +1,56 @@ +package githubv4 + +import ( + "context" + "net/http" + + "github.com/shurcooL/graphql" +) + +// Client is a GitHub GraphQL API v4 client. +type Client struct { + client *graphql.Client +} + +// NewClient creates a new GitHub GraphQL API v4 client with the provided http.Client. +// If httpClient is nil, then http.DefaultClient is used. +// +// Note that GitHub GraphQL API v4 requires authentication, so +// the provided http.Client is expected to take care of that. +func NewClient(httpClient *http.Client) *Client { + return &Client{ + client: graphql.NewClient("https://api.github.com/graphql", httpClient), + } +} + +// NewEnterpriseClient creates a new GitHub GraphQL API v4 client for the GitHub Enterprise +// instance with the specified GraphQL endpoint URL, using the provided http.Client. +// If httpClient is nil, then http.DefaultClient is used. +// +// Note that GitHub GraphQL API v4 requires authentication, so +// the provided http.Client is expected to take care of that. +func NewEnterpriseClient(url string, httpClient *http.Client) *Client { + return &Client{ + client: graphql.NewClient(url, httpClient), + } +} + +// Query executes a single GraphQL query request, +// with a query derived from q, populating the response into it. +// q should be a pointer to struct that corresponds to the GitHub GraphQL schema. +func (c *Client) Query(ctx context.Context, q interface{}, variables map[string]interface{}) error { + return c.client.Query(ctx, q, variables) +} + +// Mutate executes a single GraphQL mutation request, +// with a mutation derived from m, populating the response into it. +// m should be a pointer to struct that corresponds to the GitHub GraphQL schema. +// Provided input will be set as a variable named "input". +func (c *Client) Mutate(ctx context.Context, m interface{}, input Input, variables map[string]interface{}) error { + if variables == nil { + variables = map[string]interface{}{"input": input} + } else { + variables["input"] = input + } + return c.client.Mutate(ctx, m, variables) +} diff --git a/vendor/github.com/shurcooL/githubv4/input.go b/vendor/github.com/shurcooL/githubv4/input.go new file mode 100644 index 0000000000..91555f4e46 --- /dev/null +++ b/vendor/github.com/shurcooL/githubv4/input.go @@ -0,0 +1,1751 @@ +// Code generated by gen.go; DO NOT EDIT. 
+ +package githubv4 + +// Input represents one of the Input structs: +// +// AcceptEnterpriseAdministratorInvitationInput, AcceptTopicSuggestionInput, AddAssigneesToAssignableInput, AddCommentInput, AddLabelsToLabelableInput, AddProjectCardInput, AddProjectColumnInput, AddPullRequestReviewCommentInput, AddPullRequestReviewInput, AddReactionInput, AddStarInput, ArchiveRepositoryInput, AuditLogOrder, CancelEnterpriseAdminInvitationInput, ChangeUserStatusInput, ClearLabelsFromLabelableInput, CloneProjectInput, CloneTemplateRepositoryInput, CloseIssueInput, ClosePullRequestInput, CommitAuthor, CommitContributionOrder, ContributionOrder, ConvertProjectCardNoteToIssueInput, CreateBranchProtectionRuleInput, CreateContentAttachmentInput, CreateEnterpriseOrganizationInput, CreateIssueInput, CreateProjectInput, CreatePullRequestInput, CreateRefInput, CreateRepositoryInput, CreateTeamDiscussionCommentInput, CreateTeamDiscussionInput, DeclineTopicSuggestionInput, DeleteBranchProtectionRuleInput, DeleteIssueCommentInput, DeleteIssueInput, DeletePackageVersionInput, DeleteProjectCardInput, DeleteProjectColumnInput, DeleteProjectInput, DeletePullRequestReviewCommentInput, DeletePullRequestReviewInput, DeleteRefInput, DeleteTeamDiscussionCommentInput, DeleteTeamDiscussionInput, DeploymentOrder, DismissPullRequestReviewInput, DraftPullRequestReviewComment, EnterpriseAdministratorInvitationOrder, EnterpriseMemberOrder, EnterpriseOrder, EnterpriseServerInstallationOrder, EnterpriseServerUserAccountEmailOrder, EnterpriseServerUserAccountOrder, EnterpriseServerUserAccountsUploadOrder, FollowUserInput, GistOrder, ImportProjectInput, InviteEnterpriseAdminInput, IssueFilters, IssueOrder, LanguageOrder, LinkRepositoryToProjectInput, LockLockableInput, MergeBranchInput, MergePullRequestInput, MilestoneOrder, MinimizeCommentInput, MoveProjectCardInput, MoveProjectColumnInput, OrganizationOrder, PinIssueInput, ProjectCardImport, ProjectColumnImport, ProjectOrder, PullRequestOrder, ReactionOrder, RefOrder, RegenerateEnterpriseIdentityProviderRecoveryCodesInput, RegistryPackageMetadatum, ReleaseOrder, RemoveAssigneesFromAssignableInput, RemoveEnterpriseAdminInput, RemoveEnterpriseOrganizationInput, RemoveLabelsFromLabelableInput, RemoveOutsideCollaboratorInput, RemoveReactionInput, RemoveStarInput, ReopenIssueInput, ReopenPullRequestInput, RepositoryInvitationOrder, RepositoryOrder, RequestReviewsInput, ResolveReviewThreadInput, SavedReplyOrder, SecurityAdvisoryIdentifierFilter, SecurityAdvisoryOrder, SecurityVulnerabilityOrder, SponsorsTierOrder, SponsorshipOrder, StarOrder, SubmitPullRequestReviewInput, TeamDiscussionCommentOrder, TeamDiscussionOrder, TeamMemberOrder, TeamOrder, TeamRepositoryOrder, TransferIssueInput, UnarchiveRepositoryInput, UnfollowUserInput, UnlinkRepositoryFromProjectInput, UnlockLockableInput, UnmarkIssueAsDuplicateInput, UnminimizeCommentInput, UnpinIssueInput, UnresolveReviewThreadInput, UpdateBranchProtectionRuleInput, UpdateEnterpriseActionExecutionCapabilitySettingInput, UpdateEnterpriseAdministratorRoleInput, UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput, UpdateEnterpriseDefaultRepositoryPermissionSettingInput, UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput, UpdateEnterpriseMembersCanCreateRepositoriesSettingInput, UpdateEnterpriseMembersCanDeleteIssuesSettingInput, UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput, UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput, UpdateEnterpriseMembersCanMakePurchasesSettingInput, 
UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput, UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput, UpdateEnterpriseOrganizationProjectsSettingInput, UpdateEnterpriseProfileInput, UpdateEnterpriseRepositoryProjectsSettingInput, UpdateEnterpriseTeamDiscussionsSettingInput, UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput, UpdateIssueCommentInput, UpdateIssueInput, UpdateProjectCardInput, UpdateProjectColumnInput, UpdateProjectInput, UpdatePullRequestInput, UpdatePullRequestReviewCommentInput, UpdatePullRequestReviewInput, UpdateRefInput, UpdateRepositoryInput, UpdateSubscriptionInput, UpdateTeamDiscussionCommentInput, UpdateTeamDiscussionInput, UpdateTopicsInput, UserStatusOrder. +type Input interface{} + +// AcceptEnterpriseAdministratorInvitationInput is an autogenerated input type of AcceptEnterpriseAdministratorInvitation. +type AcceptEnterpriseAdministratorInvitationInput struct { + // The id of the invitation being accepted. (Required.) + InvitationID ID `json:"invitationId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AcceptTopicSuggestionInput is an autogenerated input type of AcceptTopicSuggestion. +type AcceptTopicSuggestionInput struct { + // The Node ID of the repository. (Required.) + RepositoryID ID `json:"repositoryId"` + // The name of the suggested topic. (Required.) + Name String `json:"name"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AddAssigneesToAssignableInput is an autogenerated input type of AddAssigneesToAssignable. +type AddAssigneesToAssignableInput struct { + // The id of the assignable object to add assignees to. (Required.) + AssignableID ID `json:"assignableId"` + // The id of users to add as assignees. (Required.) + AssigneeIDs []ID `json:"assigneeIds"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AddCommentInput is an autogenerated input type of AddComment. +type AddCommentInput struct { + // The Node ID of the subject to modify. (Required.) + SubjectID ID `json:"subjectId"` + // The contents of the comment. (Required.) + Body String `json:"body"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AddLabelsToLabelableInput is an autogenerated input type of AddLabelsToLabelable. +type AddLabelsToLabelableInput struct { + // The id of the labelable object to add labels to. (Required.) + LabelableID ID `json:"labelableId"` + // The ids of the labels to add. (Required.) + LabelIDs []ID `json:"labelIds"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AddProjectCardInput is an autogenerated input type of AddProjectCard. +type AddProjectCardInput struct { + // The Node ID of the ProjectColumn. (Required.) + ProjectColumnID ID `json:"projectColumnId"` + + // The content of the card. Must be a member of the ProjectCardItem union. (Optional.) + ContentID *ID `json:"contentId,omitempty"` + // The note on the card. (Optional.) + Note *String `json:"note,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) 
+ ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AddProjectColumnInput is an autogenerated input type of AddProjectColumn. +type AddProjectColumnInput struct { + // The Node ID of the project. (Required.) + ProjectID ID `json:"projectId"` + // The name of the column. (Required.) + Name String `json:"name"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AddPullRequestReviewCommentInput is an autogenerated input type of AddPullRequestReviewComment. +type AddPullRequestReviewCommentInput struct { + // The Node ID of the review to modify. (Required.) + PullRequestReviewID ID `json:"pullRequestReviewId"` + // The text of the comment. (Required.) + Body String `json:"body"` + + // The SHA of the commit to comment on. (Optional.) + CommitOID *GitObjectID `json:"commitOID,omitempty"` + // The relative path of the file to comment on. (Optional.) + Path *String `json:"path,omitempty"` + // The line index in the diff to comment on. (Optional.) + Position *Int `json:"position,omitempty"` + // The comment id to reply to. (Optional.) + InReplyTo *ID `json:"inReplyTo,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AddPullRequestReviewInput is an autogenerated input type of AddPullRequestReview. +type AddPullRequestReviewInput struct { + // The Node ID of the pull request to modify. (Required.) + PullRequestID ID `json:"pullRequestId"` + + // The commit OID the review pertains to. (Optional.) + CommitOID *GitObjectID `json:"commitOID,omitempty"` + // The contents of the review body comment. (Optional.) + Body *String `json:"body,omitempty"` + // The event to perform on the pull request review. (Optional.) + Event *PullRequestReviewEvent `json:"event,omitempty"` + // The review line comments. (Optional.) + Comments *[]*DraftPullRequestReviewComment `json:"comments,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AddReactionInput is an autogenerated input type of AddReaction. +type AddReactionInput struct { + // The Node ID of the subject to modify. (Required.) + SubjectID ID `json:"subjectId"` + // The name of the emoji to react with. (Required.) + Content ReactionContent `json:"content"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AddStarInput is an autogenerated input type of AddStar. +type AddStarInput struct { + // The Starrable ID to star. (Required.) + StarrableID ID `json:"starrableId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// ArchiveRepositoryInput is an autogenerated input type of ArchiveRepository. +type ArchiveRepositoryInput struct { + // The ID of the repository to mark as archived. (Required.) + RepositoryID ID `json:"repositoryId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// AuditLogOrder represents ordering options for Audit Log connections. +type AuditLogOrder struct { + + // The field to order Audit Logs by. (Optional.) + Field *AuditLogOrderField `json:"field,omitempty"` + // The ordering direction. (Optional.) 
+ Direction *OrderDirection `json:"direction,omitempty"` +} + +// CancelEnterpriseAdminInvitationInput is an autogenerated input type of CancelEnterpriseAdminInvitation. +type CancelEnterpriseAdminInvitationInput struct { + // The Node ID of the pending enterprise administrator invitation. (Required.) + InvitationID ID `json:"invitationId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// ChangeUserStatusInput is an autogenerated input type of ChangeUserStatus. +type ChangeUserStatusInput struct { + + // The emoji to represent your status. Can either be a native Unicode emoji or an emoji name with colons, e.g., :grinning:. (Optional.) + Emoji *String `json:"emoji,omitempty"` + // A short description of your current status. (Optional.) + Message *String `json:"message,omitempty"` + // The ID of the organization whose members will be allowed to see the status. If omitted, the status will be publicly visible. (Optional.) + OrganizationID *ID `json:"organizationId,omitempty"` + // Whether this status should indicate you are not fully available on GitHub, e.g., you are away. (Optional.) + LimitedAvailability *Boolean `json:"limitedAvailability,omitempty"` + // If set, the user status will not be shown after this date. (Optional.) + ExpiresAt *DateTime `json:"expiresAt,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// ClearLabelsFromLabelableInput is an autogenerated input type of ClearLabelsFromLabelable. +type ClearLabelsFromLabelableInput struct { + // The id of the labelable object to clear the labels from. (Required.) + LabelableID ID `json:"labelableId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CloneProjectInput is an autogenerated input type of CloneProject. +type CloneProjectInput struct { + // The owner ID to create the project under. (Required.) + TargetOwnerID ID `json:"targetOwnerId"` + // The source project to clone. (Required.) + SourceID ID `json:"sourceId"` + // Whether or not to clone the source project's workflows. (Required.) + IncludeWorkflows Boolean `json:"includeWorkflows"` + // The name of the project. (Required.) + Name String `json:"name"` + + // The description of the project. (Optional.) + Body *String `json:"body,omitempty"` + // The visibility of the project, defaults to false (private). (Optional.) + Public *Boolean `json:"public,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CloneTemplateRepositoryInput is an autogenerated input type of CloneTemplateRepository. +type CloneTemplateRepositoryInput struct { + // The Node ID of the template repository. (Required.) + RepositoryID ID `json:"repositoryId"` + // The name of the new repository. (Required.) + Name String `json:"name"` + // The ID of the owner for the new repository. (Required.) + OwnerID ID `json:"ownerId"` + // Indicates the repository's visibility level. (Required.) + Visibility RepositoryVisibility `json:"visibility"` + + // A short description of the new repository. (Optional.) + Description *String `json:"description,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) 
+ ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CloseIssueInput is an autogenerated input type of CloseIssue. +type CloseIssueInput struct { + // ID of the issue to be closed. (Required.) + IssueID ID `json:"issueId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// ClosePullRequestInput is an autogenerated input type of ClosePullRequest. +type ClosePullRequestInput struct { + // ID of the pull request to be closed. (Required.) + PullRequestID ID `json:"pullRequestId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CommitAuthor specifies an author for filtering Git commits. +type CommitAuthor struct { + + // ID of a User to filter by. If non-null, only commits authored by this user will be returned. This field takes precedence over emails. (Optional.) + ID *ID `json:"id,omitempty"` + // Email addresses to filter by. Commits authored by any of the specified email addresses will be returned. (Optional.) + Emails *[]String `json:"emails,omitempty"` +} + +// CommitContributionOrder represents ordering options for commit contribution connections. +type CommitContributionOrder struct { + // The field by which to order commit contributions. (Required.) + Field CommitContributionOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// ContributionOrder represents ordering options for contribution connections. +type ContributionOrder struct { + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` + + // The field by which to order contributions. **Upcoming Change on 2019-10-01 UTC** **Description:** `field` will be removed. Only one order field is supported. **Reason:** `field` will be removed. (Optional.) + Field *ContributionOrderField `json:"field,omitempty"` +} + +// ConvertProjectCardNoteToIssueInput is an autogenerated input type of ConvertProjectCardNoteToIssue. +type ConvertProjectCardNoteToIssueInput struct { + // The ProjectCard ID to convert. (Required.) + ProjectCardID ID `json:"projectCardId"` + // The ID of the repository to create the issue in. (Required.) + RepositoryID ID `json:"repositoryId"` + + // The title of the newly created issue. Defaults to the card's note text. (Optional.) + Title *String `json:"title,omitempty"` + // The body of the newly created issue. (Optional.) + Body *String `json:"body,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CreateBranchProtectionRuleInput is an autogenerated input type of CreateBranchProtectionRule. +type CreateBranchProtectionRuleInput struct { + // The global relay id of the repository in which a new branch protection rule should be created in. (Required.) + RepositoryID ID `json:"repositoryId"` + // The glob-like pattern used to determine matching branches. (Required.) + Pattern String `json:"pattern"` + + // Are approving reviews required to update matching branches. (Optional.) + RequiresApprovingReviews *Boolean `json:"requiresApprovingReviews,omitempty"` + // Number of approving reviews required to update matching branches. (Optional.) + RequiredApprovingReviewCount *Int `json:"requiredApprovingReviewCount,omitempty"` + // Are commits required to be signed. (Optional.) 
+ RequiresCommitSignatures *Boolean `json:"requiresCommitSignatures,omitempty"` + // Can admins overwrite branch protection. (Optional.) + IsAdminEnforced *Boolean `json:"isAdminEnforced,omitempty"` + // Are status checks required to update matching branches. (Optional.) + RequiresStatusChecks *Boolean `json:"requiresStatusChecks,omitempty"` + // Are branches required to be up to date before merging. (Optional.) + RequiresStrictStatusChecks *Boolean `json:"requiresStrictStatusChecks,omitempty"` + // Are reviews from code owners required to update matching branches. (Optional.) + RequiresCodeOwnerReviews *Boolean `json:"requiresCodeOwnerReviews,omitempty"` + // Will new commits pushed to matching branches dismiss pull request review approvals. (Optional.) + DismissesStaleReviews *Boolean `json:"dismissesStaleReviews,omitempty"` + // Is dismissal of pull request reviews restricted. (Optional.) + RestrictsReviewDismissals *Boolean `json:"restrictsReviewDismissals,omitempty"` + // A list of User or Team IDs allowed to dismiss reviews on pull requests targeting matching branches. (Optional.) + ReviewDismissalActorIDs *[]ID `json:"reviewDismissalActorIds,omitempty"` + // Is pushing to matching branches restricted. (Optional.) + RestrictsPushes *Boolean `json:"restrictsPushes,omitempty"` + // A list of User, Team or App IDs allowed to push to matching branches. (Optional.) + PushActorIDs *[]ID `json:"pushActorIds,omitempty"` + // List of required status check contexts that must pass for commits to be accepted to matching branches. (Optional.) + RequiredStatusCheckContexts *[]String `json:"requiredStatusCheckContexts,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CreateContentAttachmentInput is an autogenerated input type of CreateContentAttachment. +type CreateContentAttachmentInput struct { + // The node ID of the content_reference. (Required.) + ContentReferenceID ID `json:"contentReferenceId"` + // The title of the content attachment. (Required.) + Title String `json:"title"` + // The body of the content attachment, which may contain markdown. (Required.) + Body String `json:"body"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CreateEnterpriseOrganizationInput is an autogenerated input type of CreateEnterpriseOrganization. +type CreateEnterpriseOrganizationInput struct { + // The ID of the enterprise owning the new organization. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The login of the new organization. (Required.) + Login String `json:"login"` + // The profile name of the new organization. (Required.) + ProfileName String `json:"profileName"` + // The email used for sending billing receipts. (Required.) + BillingEmail String `json:"billingEmail"` + // The logins for the administrators of the new organization. (Required.) + AdminLogins []String `json:"adminLogins"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CreateIssueInput is an autogenerated input type of CreateIssue. +type CreateIssueInput struct { + // The Node ID of the repository. (Required.) + RepositoryID ID `json:"repositoryId"` + // The title for the issue. (Required.) + Title String `json:"title"` + + // The body for the issue description. (Optional.) 
+ Body *String `json:"body,omitempty"` + // The Node ID for the user assignee for this issue. (Optional.) + AssigneeIDs *[]ID `json:"assigneeIds,omitempty"` + // The Node ID of the milestone for this issue. (Optional.) + MilestoneID *ID `json:"milestoneId,omitempty"` + // An array of Node IDs of labels for this issue. (Optional.) + LabelIDs *[]ID `json:"labelIds,omitempty"` + // An array of Node IDs for projects associated with this issue. (Optional.) + ProjectIDs *[]ID `json:"projectIds,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CreateProjectInput is an autogenerated input type of CreateProject. +type CreateProjectInput struct { + // The owner ID to create the project under. (Required.) + OwnerID ID `json:"ownerId"` + // The name of project. (Required.) + Name String `json:"name"` + + // The description of project. (Optional.) + Body *String `json:"body,omitempty"` + // The name of the GitHub-provided template. (Optional.) + Template *ProjectTemplate `json:"template,omitempty"` + // A list of repository IDs to create as linked repositories for the project. (Optional.) + RepositoryIDs *[]ID `json:"repositoryIds,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CreatePullRequestInput is an autogenerated input type of CreatePullRequest. +type CreatePullRequestInput struct { + // The Node ID of the repository. (Required.) + RepositoryID ID `json:"repositoryId"` + // The name of the branch you want your changes pulled into. This should be an existing branch on the current repository. You cannot update the base branch on a pull request to point to another repository. (Required.) + BaseRefName String `json:"baseRefName"` + // The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace `head_ref_name` with a user like this: `username:branch`. (Required.) + HeadRefName String `json:"headRefName"` + // The title of the pull request. (Required.) + Title String `json:"title"` + + // The contents of the pull request. (Optional.) + Body *String `json:"body,omitempty"` + // Indicates whether maintainers can modify the pull request. (Optional.) + MaintainerCanModify *Boolean `json:"maintainerCanModify,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CreateRefInput is an autogenerated input type of CreateRef. +type CreateRefInput struct { + // The Node ID of the Repository to create the Ref in. (Required.) + RepositoryID ID `json:"repositoryId"` + // The fully qualified name of the new Ref (ie: `refs/heads/my_new_branch`). (Required.) + Name String `json:"name"` + // The GitObjectID that the new Ref shall target. Must point to a commit. (Required.) + Oid GitObjectID `json:"oid"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// CreateRepositoryInput is an autogenerated input type of CreateRepository. +type CreateRepositoryInput struct { + // The name of the new repository. (Required.) + Name String `json:"name"` + // Indicates the repository's visibility level. (Required.) + Visibility RepositoryVisibility `json:"visibility"` + + // The ID of the owner for the new repository. (Optional.) 
+ OwnerID *ID `json:"ownerId,omitempty"`
+ // A short description of the new repository. (Optional.)
+ Description *String `json:"description,omitempty"`
+ // Whether this repository should be marked as a template such that anyone who can access it can create new repositories with the same files and directory structure. (Optional.)
+ Template *Boolean `json:"template,omitempty"`
+ // The URL for a web page about this repository. (Optional.)
+ HomepageURL *URI `json:"homepageUrl,omitempty"`
+ // Indicates if the repository should have the wiki feature enabled. (Optional.)
+ HasWikiEnabled *Boolean `json:"hasWikiEnabled,omitempty"`
+ // Indicates if the repository should have the issues feature enabled. (Optional.)
+ HasIssuesEnabled *Boolean `json:"hasIssuesEnabled,omitempty"`
+ // When an organization is specified as the owner, this ID identifies the team that should be granted access to the new repository. (Optional.)
+ TeamID *ID `json:"teamId,omitempty"`
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// CreateTeamDiscussionCommentInput is an autogenerated input type of CreateTeamDiscussionComment.
+type CreateTeamDiscussionCommentInput struct {
+ // The ID of the discussion to which the comment belongs. (Required.)
+ DiscussionID ID `json:"discussionId"`
+ // The content of the comment. (Required.)
+ Body String `json:"body"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// CreateTeamDiscussionInput is an autogenerated input type of CreateTeamDiscussion.
+type CreateTeamDiscussionInput struct {
+ // The ID of the team to which the discussion belongs. (Required.)
+ TeamID ID `json:"teamId"`
+ // The title of the discussion. (Required.)
+ Title String `json:"title"`
+ // The content of the discussion. (Required.)
+ Body String `json:"body"`
+
+ // If true, restricts the visibility of this discussion to team members and organization admins. If false or not specified, allows any organization member to view this discussion. (Optional.)
+ Private *Boolean `json:"private,omitempty"`
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// DeclineTopicSuggestionInput is an autogenerated input type of DeclineTopicSuggestion.
+type DeclineTopicSuggestionInput struct {
+ // The Node ID of the repository. (Required.)
+ RepositoryID ID `json:"repositoryId"`
+ // The name of the suggested topic. (Required.)
+ Name String `json:"name"`
+ // The reason why the suggested topic is declined. (Required.)
+ Reason TopicSuggestionDeclineReason `json:"reason"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// DeleteBranchProtectionRuleInput is an autogenerated input type of DeleteBranchProtectionRule.
+type DeleteBranchProtectionRuleInput struct {
+ // The global relay id of the branch protection rule to be deleted. (Required.)
+ BranchProtectionRuleID ID `json:"branchProtectionRuleId"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// DeleteIssueCommentInput is an autogenerated input type of DeleteIssueComment.
+type DeleteIssueCommentInput struct {
+ // The ID of the comment to delete. (Required.)
+ ID ID `json:"id"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeleteIssueInput is an autogenerated input type of DeleteIssue. +type DeleteIssueInput struct { + // The ID of the issue to delete. (Required.) + IssueID ID `json:"issueId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeletePackageVersionInput is an autogenerated input type of DeletePackageVersion. +type DeletePackageVersionInput struct { + // The ID of the package version to be deleted. (Required.) + PackageVersionID ID `json:"packageVersionId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeleteProjectCardInput is an autogenerated input type of DeleteProjectCard. +type DeleteProjectCardInput struct { + // The id of the card to delete. (Required.) + CardID ID `json:"cardId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeleteProjectColumnInput is an autogenerated input type of DeleteProjectColumn. +type DeleteProjectColumnInput struct { + // The id of the column to delete. (Required.) + ColumnID ID `json:"columnId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeleteProjectInput is an autogenerated input type of DeleteProject. +type DeleteProjectInput struct { + // The Project ID to update. (Required.) + ProjectID ID `json:"projectId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeletePullRequestReviewCommentInput is an autogenerated input type of DeletePullRequestReviewComment. +type DeletePullRequestReviewCommentInput struct { + // The ID of the comment to delete. (Required.) + ID ID `json:"id"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeletePullRequestReviewInput is an autogenerated input type of DeletePullRequestReview. +type DeletePullRequestReviewInput struct { + // The Node ID of the pull request review to delete. (Required.) + PullRequestReviewID ID `json:"pullRequestReviewId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeleteRefInput is an autogenerated input type of DeleteRef. +type DeleteRefInput struct { + // The Node ID of the Ref to be deleted. (Required.) + RefID ID `json:"refId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeleteTeamDiscussionCommentInput is an autogenerated input type of DeleteTeamDiscussionComment. +type DeleteTeamDiscussionCommentInput struct { + // The ID of the comment to delete. (Required.) + ID ID `json:"id"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeleteTeamDiscussionInput is an autogenerated input type of DeleteTeamDiscussion. +type DeleteTeamDiscussionInput struct { + // The discussion ID to delete. (Required.) 
+ ID ID `json:"id"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DeploymentOrder represents ordering options for deployment connections. +type DeploymentOrder struct { + // The field to order deployments by. (Required.) + Field DeploymentOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// DismissPullRequestReviewInput is an autogenerated input type of DismissPullRequestReview. +type DismissPullRequestReviewInput struct { + // The Node ID of the pull request review to modify. (Required.) + PullRequestReviewID ID `json:"pullRequestReviewId"` + // The contents of the pull request review dismissal message. (Required.) + Message String `json:"message"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// DraftPullRequestReviewComment specifies a review comment to be left with a Pull Request Review. +type DraftPullRequestReviewComment struct { + // Path to the file being commented on. (Required.) + Path String `json:"path"` + // Position in the file to leave a comment on. (Required.) + Position Int `json:"position"` + // Body of the comment to leave. (Required.) + Body String `json:"body"` +} + +// EnterpriseAdministratorInvitationOrder represents ordering options for enterprise administrator invitation connections. +type EnterpriseAdministratorInvitationOrder struct { + // The field to order enterprise administrator invitations by. (Required.) + Field EnterpriseAdministratorInvitationOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// EnterpriseMemberOrder represents ordering options for enterprise member connections. +type EnterpriseMemberOrder struct { + // The field to order enterprise members by. (Required.) + Field EnterpriseMemberOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// EnterpriseOrder represents ordering options for enterprises. +type EnterpriseOrder struct { + // The field to order enterprises by. (Required.) + Field EnterpriseOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// EnterpriseServerInstallationOrder represents ordering options for Enterprise Server installation connections. +type EnterpriseServerInstallationOrder struct { + // The field to order Enterprise Server installations by. (Required.) + Field EnterpriseServerInstallationOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// EnterpriseServerUserAccountEmailOrder represents ordering options for Enterprise Server user account email connections. +type EnterpriseServerUserAccountEmailOrder struct { + // The field to order emails by. (Required.) + Field EnterpriseServerUserAccountEmailOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// EnterpriseServerUserAccountOrder represents ordering options for Enterprise Server user account connections. +type EnterpriseServerUserAccountOrder struct { + // The field to order user accounts by. (Required.) + Field EnterpriseServerUserAccountOrderField `json:"field"` + // The ordering direction. (Required.) 
+ Direction OrderDirection `json:"direction"`
+}
+
+// EnterpriseServerUserAccountsUploadOrder represents ordering options for Enterprise Server user accounts upload connections.
+type EnterpriseServerUserAccountsUploadOrder struct {
+ // The field to order user accounts uploads by. (Required.)
+ Field EnterpriseServerUserAccountsUploadOrderField `json:"field"`
+ // The ordering direction. (Required.)
+ Direction OrderDirection `json:"direction"`
+}
+
+// FollowUserInput is an autogenerated input type of FollowUser.
+type FollowUserInput struct {
+ // ID of the user to follow. (Required.)
+ UserID ID `json:"userId"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// GistOrder represents ordering options for gist connections.
+type GistOrder struct {
+ // The field to order repositories by. (Required.)
+ Field GistOrderField `json:"field"`
+ // The ordering direction. (Required.)
+ Direction OrderDirection `json:"direction"`
+}
+
+// ImportProjectInput is an autogenerated input type of ImportProject.
+type ImportProjectInput struct {
+ // The name of the Organization or User to create the Project under. (Required.)
+ OwnerName String `json:"ownerName"`
+ // The name of Project. (Required.)
+ Name String `json:"name"`
+ // A list of columns containing issues and pull requests. (Required.)
+ ColumnImports []ProjectColumnImport `json:"columnImports"`
+
+ // The description of Project. (Optional.)
+ Body *String `json:"body,omitempty"`
+ // Whether the Project is public or not. (Optional.)
+ Public *Boolean `json:"public,omitempty"`
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// InviteEnterpriseAdminInput is an autogenerated input type of InviteEnterpriseAdmin.
+type InviteEnterpriseAdminInput struct {
+ // The ID of the enterprise to which you want to invite an administrator. (Required.)
+ EnterpriseID ID `json:"enterpriseId"`
+
+ // The login of a user to invite as an administrator. (Optional.)
+ Invitee *String `json:"invitee,omitempty"`
+ // The email of the person to invite as an administrator. (Optional.)
+ Email *String `json:"email,omitempty"`
+ // The role of the administrator. (Optional.)
+ Role *EnterpriseAdministratorRole `json:"role,omitempty"`
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// IssueFilters represents ways in which to filter lists of issues.
+type IssueFilters struct {
+
+ // List issues assigned to given name. Pass in `null` for issues with no assigned user, and `*` for issues assigned to any user. (Optional.)
+ Assignee *String `json:"assignee,omitempty"`
+ // List issues created by given name. (Optional.)
+ CreatedBy *String `json:"createdBy,omitempty"`
+ // List issues where the list of label names exist on the issue. (Optional.)
+ Labels *[]String `json:"labels,omitempty"`
+ // List issues where the given name is mentioned in the issue. (Optional.)
+ Mentioned *String `json:"mentioned,omitempty"`
+ // List issues by given milestone argument. If a string representation of an integer is passed, it should refer to a milestone by its number field. Pass in `null` for issues with no milestone, and `*` for issues that are assigned to any milestone. (Optional.)
+ Milestone *String `json:"milestone,omitempty"` + // List issues that have been updated at or after the given date. (Optional.) + Since *DateTime `json:"since,omitempty"` + // List issues filtered by the list of states given. (Optional.) + States *[]IssueState `json:"states,omitempty"` + // List issues subscribed to by viewer. (Optional.) + ViewerSubscribed *Boolean `json:"viewerSubscribed,omitempty"` +} + +// IssueOrder represents ways in which lists of issues can be ordered upon return. +type IssueOrder struct { + // The field in which to order issues by. (Required.) + Field IssueOrderField `json:"field"` + // The direction in which to order issues by the specified field. (Required.) + Direction OrderDirection `json:"direction"` +} + +// LanguageOrder represents ordering options for language connections. +type LanguageOrder struct { + // The field to order languages by. (Required.) + Field LanguageOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// LinkRepositoryToProjectInput is an autogenerated input type of LinkRepositoryToProject. +type LinkRepositoryToProjectInput struct { + // The ID of the Project to link to a Repository. (Required.) + ProjectID ID `json:"projectId"` + // The ID of the Repository to link to a Project. (Required.) + RepositoryID ID `json:"repositoryId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// LockLockableInput is an autogenerated input type of LockLockable. +type LockLockableInput struct { + // ID of the issue or pull request to be locked. (Required.) + LockableID ID `json:"lockableId"` + + // A reason for why the issue or pull request will be locked. (Optional.) + LockReason *LockReason `json:"lockReason,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// MergeBranchInput is an autogenerated input type of MergeBranch. +type MergeBranchInput struct { + // The Node ID of the Repository containing the base branch that will be modified. (Required.) + RepositoryID ID `json:"repositoryId"` + // The name of the base branch that the provided head will be merged into. (Required.) + Base String `json:"base"` + // The head to merge into the base branch. This can be a branch name or a commit GitObjectID. (Required.) + Head String `json:"head"` + + // Message to use for the merge commit. If omitted, a default will be used. (Optional.) + CommitMessage *String `json:"commitMessage,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// MergePullRequestInput is an autogenerated input type of MergePullRequest. +type MergePullRequestInput struct { + // ID of the pull request to be merged. (Required.) + PullRequestID ID `json:"pullRequestId"` + + // Commit headline to use for the merge commit; if omitted, a default message will be used. (Optional.) + CommitHeadline *String `json:"commitHeadline,omitempty"` + // Commit body to use for the merge commit; if omitted, a default message will be used. (Optional.) + CommitBody *String `json:"commitBody,omitempty"` + // OID that the pull request head ref must match to allow merge; if omitted, no check is performed. (Optional.) + ExpectedHeadOid *GitObjectID `json:"expectedHeadOid,omitempty"` + // The merge method to use. If omitted, defaults to 'MERGE'. 
(Optional.) + MergeMethod *PullRequestMergeMethod `json:"mergeMethod,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// MilestoneOrder represents ordering options for milestone connections. +type MilestoneOrder struct { + // The field to order milestones by. (Required.) + Field MilestoneOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// MinimizeCommentInput is an autogenerated input type of MinimizeComment. +type MinimizeCommentInput struct { + // The Node ID of the subject to modify. (Required.) + SubjectID ID `json:"subjectId"` + // The classification of comment. (Required.) + Classifier ReportedContentClassifiers `json:"classifier"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// MoveProjectCardInput is an autogenerated input type of MoveProjectCard. +type MoveProjectCardInput struct { + // The id of the card to move. (Required.) + CardID ID `json:"cardId"` + // The id of the column to move it into. (Required.) + ColumnID ID `json:"columnId"` + + // Place the new card after the card with this id. Pass null to place it at the top. (Optional.) + AfterCardID *ID `json:"afterCardId,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// MoveProjectColumnInput is an autogenerated input type of MoveProjectColumn. +type MoveProjectColumnInput struct { + // The id of the column to move. (Required.) + ColumnID ID `json:"columnId"` + + // Place the new column after the column with this id. Pass null to place it at the front. (Optional.) + AfterColumnID *ID `json:"afterColumnId,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// OrganizationOrder represents ordering options for organization connections. +type OrganizationOrder struct { + // The field to order organizations by. (Required.) + Field OrganizationOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// PinIssueInput is an autogenerated input type of PinIssue. +type PinIssueInput struct { + // The ID of the issue to be pinned. (Required.) + IssueID ID `json:"issueId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// ProjectCardImport represents an issue or PR and its owning repository to be used in a project card. +type ProjectCardImport struct { + // Repository name with owner (owner/repository). (Required.) + Repository String `json:"repository"` + // The issue or pull request number. (Required.) + Number Int `json:"number"` +} + +// ProjectColumnImport represents a project column and a list of its issues and PRs. +type ProjectColumnImport struct { + // The name of the column. (Required.) + ColumnName String `json:"columnName"` + // The position of the column, starting from 0. (Required.) + Position Int `json:"position"` + + // A list of issues and pull requests in the column. (Optional.) + Issues *[]ProjectCardImport `json:"issues,omitempty"` +} + +// ProjectOrder represents ways in which lists of projects can be ordered upon return. 
+type ProjectOrder struct {
+ // The field in which to order projects by. (Required.)
+ Field ProjectOrderField `json:"field"`
+ // The direction in which to order projects by the specified field. (Required.)
+ Direction OrderDirection `json:"direction"`
+}
+
+// PullRequestOrder represents ways in which lists of issues can be ordered upon return.
+type PullRequestOrder struct {
+ // The field in which to order pull requests by. (Required.)
+ Field PullRequestOrderField `json:"field"`
+ // The direction in which to order pull requests by the specified field. (Required.)
+ Direction OrderDirection `json:"direction"`
+}
+
+// ReactionOrder represents ways in which lists of reactions can be ordered upon return.
+type ReactionOrder struct {
+ // The field in which to order reactions by. (Required.)
+ Field ReactionOrderField `json:"field"`
+ // The direction in which to order reactions by the specified field. (Required.)
+ Direction OrderDirection `json:"direction"`
+}
+
+// RefOrder represents ways in which lists of git refs can be ordered upon return.
+type RefOrder struct {
+ // The field in which to order refs by. (Required.)
+ Field RefOrderField `json:"field"`
+ // The direction in which to order refs by the specified field. (Required.)
+ Direction OrderDirection `json:"direction"`
+}
+
+// RegenerateEnterpriseIdentityProviderRecoveryCodesInput is an autogenerated input type of RegenerateEnterpriseIdentityProviderRecoveryCodes.
+type RegenerateEnterpriseIdentityProviderRecoveryCodesInput struct {
+ // The ID of the enterprise on which to set an identity provider. (Required.)
+ EnterpriseID ID `json:"enterpriseId"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// RegistryPackageMetadatum represents a single registry metadatum.
+type RegistryPackageMetadatum struct {
+ // Name of the metadatum. (Required.)
+ Name String `json:"name"`
+ // Value of the metadatum. (Required.)
+ Value String `json:"value"`
+
+ // True, if the metadatum can be updated if it already exists. (Optional.)
+ Update *Boolean `json:"update,omitempty"`
+}
+
+// ReleaseOrder represents ways in which lists of releases can be ordered upon return.
+type ReleaseOrder struct {
+ // The field in which to order releases by. (Required.)
+ Field ReleaseOrderField `json:"field"`
+ // The direction in which to order releases by the specified field. (Required.)
+ Direction OrderDirection `json:"direction"`
+}
+
+// RemoveAssigneesFromAssignableInput is an autogenerated input type of RemoveAssigneesFromAssignable.
+type RemoveAssigneesFromAssignableInput struct {
+ // The id of the assignable object to remove assignees from. (Required.)
+ AssignableID ID `json:"assignableId"`
+ // The id of users to remove as assignees. (Required.)
+ AssigneeIDs []ID `json:"assigneeIds"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// RemoveEnterpriseAdminInput is an autogenerated input type of RemoveEnterpriseAdmin.
+type RemoveEnterpriseAdminInput struct {
+ // The Enterprise ID from which to remove the administrator. (Required.)
+ EnterpriseID ID `json:"enterpriseId"`
+ // The login of the user to remove as an administrator. (Required.)
+ Login String `json:"login"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// RemoveEnterpriseOrganizationInput is an autogenerated input type of RemoveEnterpriseOrganization. +type RemoveEnterpriseOrganizationInput struct { + // The ID of the enterprise from which the organization should be removed. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The ID of the organization to remove from the enterprise. (Required.) + OrganizationID ID `json:"organizationId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// RemoveLabelsFromLabelableInput is an autogenerated input type of RemoveLabelsFromLabelable. +type RemoveLabelsFromLabelableInput struct { + // The id of the Labelable to remove labels from. (Required.) + LabelableID ID `json:"labelableId"` + // The ids of labels to remove. (Required.) + LabelIDs []ID `json:"labelIds"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// RemoveOutsideCollaboratorInput is an autogenerated input type of RemoveOutsideCollaborator. +type RemoveOutsideCollaboratorInput struct { + // The ID of the outside collaborator to remove. (Required.) + UserID ID `json:"userId"` + // The ID of the organization to remove the outside collaborator from. (Required.) + OrganizationID ID `json:"organizationId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// RemoveReactionInput is an autogenerated input type of RemoveReaction. +type RemoveReactionInput struct { + // The Node ID of the subject to modify. (Required.) + SubjectID ID `json:"subjectId"` + // The name of the emoji reaction to remove. (Required.) + Content ReactionContent `json:"content"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// RemoveStarInput is an autogenerated input type of RemoveStar. +type RemoveStarInput struct { + // The Starrable ID to unstar. (Required.) + StarrableID ID `json:"starrableId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// ReopenIssueInput is an autogenerated input type of ReopenIssue. +type ReopenIssueInput struct { + // ID of the issue to be opened. (Required.) + IssueID ID `json:"issueId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// ReopenPullRequestInput is an autogenerated input type of ReopenPullRequest. +type ReopenPullRequestInput struct { + // ID of the pull request to be reopened. (Required.) + PullRequestID ID `json:"pullRequestId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// RepositoryInvitationOrder represents ordering options for repository invitation connections. +type RepositoryInvitationOrder struct { + // The field to order repository invitations by. (Required.) + Field RepositoryInvitationOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// RepositoryOrder represents ordering options for repository connections. +type RepositoryOrder struct { + // The field to order repositories by. 
(Required.) + Field RepositoryOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// RequestReviewsInput is an autogenerated input type of RequestReviews. +type RequestReviewsInput struct { + // The Node ID of the pull request to modify. (Required.) + PullRequestID ID `json:"pullRequestId"` + + // The Node IDs of the user to request. (Optional.) + UserIDs *[]ID `json:"userIds,omitempty"` + // The Node IDs of the team to request. (Optional.) + TeamIDs *[]ID `json:"teamIds,omitempty"` + // Add users to the set rather than replace. (Optional.) + Union *Boolean `json:"union,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// ResolveReviewThreadInput is an autogenerated input type of ResolveReviewThread. +type ResolveReviewThreadInput struct { + // The ID of the thread to resolve. (Required.) + ThreadID ID `json:"threadId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// SavedReplyOrder represents ordering options for saved reply connections. +type SavedReplyOrder struct { + // The field to order saved replies by. (Required.) + Field SavedReplyOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// SecurityAdvisoryIdentifierFilter represents an advisory identifier to filter results on. +type SecurityAdvisoryIdentifierFilter struct { + // The identifier type. (Required.) + Type SecurityAdvisoryIdentifierType `json:"type"` + // The identifier string. Supports exact or partial matching. (Required.) + Value String `json:"value"` +} + +// SecurityAdvisoryOrder represents ordering options for security advisory connections. +type SecurityAdvisoryOrder struct { + // The field to order security advisories by. (Required.) + Field SecurityAdvisoryOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// SecurityVulnerabilityOrder represents ordering options for security vulnerability connections. +type SecurityVulnerabilityOrder struct { + // The field to order security vulnerabilities by. (Required.) + Field SecurityVulnerabilityOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// SponsorsTierOrder represents ordering options for Sponsors tiers connections. +type SponsorsTierOrder struct { + // The field to order tiers by. (Required.) + Field SponsorsTierOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// SponsorshipOrder represents ordering options for sponsorship connections. +type SponsorshipOrder struct { + // The field to order sponsorship by. (Required.) + Field SponsorshipOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// StarOrder represents ways in which star connections can be ordered. +type StarOrder struct { + // The field in which to order nodes by. (Required.) + Field StarOrderField `json:"field"` + // The direction in which to order nodes. (Required.) + Direction OrderDirection `json:"direction"` +} + +// SubmitPullRequestReviewInput is an autogenerated input type of SubmitPullRequestReview. +type SubmitPullRequestReviewInput struct { + // The Pull Request Review ID to submit. 
(Required.) + PullRequestReviewID ID `json:"pullRequestReviewId"` + // The event to send to the Pull Request Review. (Required.) + Event PullRequestReviewEvent `json:"event"` + + // The text field to set on the Pull Request Review. (Optional.) + Body *String `json:"body,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// TeamDiscussionCommentOrder represents ways in which team discussion comment connections can be ordered. +type TeamDiscussionCommentOrder struct { + // The field by which to order nodes. (Required.) + Field TeamDiscussionCommentOrderField `json:"field"` + // The direction in which to order nodes. (Required.) + Direction OrderDirection `json:"direction"` +} + +// TeamDiscussionOrder represents ways in which team discussion connections can be ordered. +type TeamDiscussionOrder struct { + // The field by which to order nodes. (Required.) + Field TeamDiscussionOrderField `json:"field"` + // The direction in which to order nodes. (Required.) + Direction OrderDirection `json:"direction"` +} + +// TeamMemberOrder represents ordering options for team member connections. +type TeamMemberOrder struct { + // The field to order team members by. (Required.) + Field TeamMemberOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// TeamOrder represents ways in which team connections can be ordered. +type TeamOrder struct { + // The field in which to order nodes by. (Required.) + Field TeamOrderField `json:"field"` + // The direction in which to order nodes. (Required.) + Direction OrderDirection `json:"direction"` +} + +// TeamRepositoryOrder represents ordering options for team repository connections. +type TeamRepositoryOrder struct { + // The field to order repositories by. (Required.) + Field TeamRepositoryOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} + +// TransferIssueInput is an autogenerated input type of TransferIssue. +type TransferIssueInput struct { + // The Node ID of the issue to be transferred. (Required.) + IssueID ID `json:"issueId"` + // The Node ID of the repository the issue should be transferred to. (Required.) + RepositoryID ID `json:"repositoryId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UnarchiveRepositoryInput is an autogenerated input type of UnarchiveRepository. +type UnarchiveRepositoryInput struct { + // The ID of the repository to unarchive. (Required.) + RepositoryID ID `json:"repositoryId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UnfollowUserInput is an autogenerated input type of UnfollowUser. +type UnfollowUserInput struct { + // ID of the user to unfollow. (Required.) + UserID ID `json:"userId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UnlinkRepositoryFromProjectInput is an autogenerated input type of UnlinkRepositoryFromProject. +type UnlinkRepositoryFromProjectInput struct { + // The ID of the Project linked to the Repository. (Required.) + ProjectID ID `json:"projectId"` + // The ID of the Repository linked to the Project. (Required.) 
+ RepositoryID ID `json:"repositoryId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UnlockLockableInput is an autogenerated input type of UnlockLockable. +type UnlockLockableInput struct { + // ID of the issue or pull request to be unlocked. (Required.) + LockableID ID `json:"lockableId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UnmarkIssueAsDuplicateInput is an autogenerated input type of UnmarkIssueAsDuplicate. +type UnmarkIssueAsDuplicateInput struct { + // ID of the issue or pull request currently marked as a duplicate. (Required.) + DuplicateID ID `json:"duplicateId"` + // ID of the issue or pull request currently considered canonical/authoritative/original. (Required.) + CanonicalID ID `json:"canonicalId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UnminimizeCommentInput is an autogenerated input type of UnminimizeComment. +type UnminimizeCommentInput struct { + // The Node ID of the subject to modify. (Required.) + SubjectID ID `json:"subjectId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UnpinIssueInput is an autogenerated input type of UnpinIssue. +type UnpinIssueInput struct { + // The ID of the issue to be unpinned. (Required.) + IssueID ID `json:"issueId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UnresolveReviewThreadInput is an autogenerated input type of UnresolveReviewThread. +type UnresolveReviewThreadInput struct { + // The ID of the thread to unresolve. (Required.) + ThreadID ID `json:"threadId"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateBranchProtectionRuleInput is an autogenerated input type of UpdateBranchProtectionRule. +type UpdateBranchProtectionRuleInput struct { + // The global relay id of the branch protection rule to be updated. (Required.) + BranchProtectionRuleID ID `json:"branchProtectionRuleId"` + + // The glob-like pattern used to determine matching branches. (Optional.) + Pattern *String `json:"pattern,omitempty"` + // Are approving reviews required to update matching branches. (Optional.) + RequiresApprovingReviews *Boolean `json:"requiresApprovingReviews,omitempty"` + // Number of approving reviews required to update matching branches. (Optional.) + RequiredApprovingReviewCount *Int `json:"requiredApprovingReviewCount,omitempty"` + // Are commits required to be signed. (Optional.) + RequiresCommitSignatures *Boolean `json:"requiresCommitSignatures,omitempty"` + // Can admins overwrite branch protection. (Optional.) + IsAdminEnforced *Boolean `json:"isAdminEnforced,omitempty"` + // Are status checks required to update matching branches. (Optional.) + RequiresStatusChecks *Boolean `json:"requiresStatusChecks,omitempty"` + // Are branches required to be up to date before merging. (Optional.) + RequiresStrictStatusChecks *Boolean `json:"requiresStrictStatusChecks,omitempty"` + // Are reviews from code owners required to update matching branches. (Optional.) 
+ RequiresCodeOwnerReviews *Boolean `json:"requiresCodeOwnerReviews,omitempty"`
+ // Will new commits pushed to matching branches dismiss pull request review approvals. (Optional.)
+ DismissesStaleReviews *Boolean `json:"dismissesStaleReviews,omitempty"`
+ // Is dismissal of pull request reviews restricted. (Optional.)
+ RestrictsReviewDismissals *Boolean `json:"restrictsReviewDismissals,omitempty"`
+ // A list of User or Team IDs allowed to dismiss reviews on pull requests targeting matching branches. (Optional.)
+ ReviewDismissalActorIDs *[]ID `json:"reviewDismissalActorIds,omitempty"`
+ // Is pushing to matching branches restricted. (Optional.)
+ RestrictsPushes *Boolean `json:"restrictsPushes,omitempty"`
+ // A list of User, Team or App IDs allowed to push to matching branches. (Optional.)
+ PushActorIDs *[]ID `json:"pushActorIds,omitempty"`
+ // List of required status check contexts that must pass for commits to be accepted to matching branches. (Optional.)
+ RequiredStatusCheckContexts *[]String `json:"requiredStatusCheckContexts,omitempty"`
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// UpdateEnterpriseActionExecutionCapabilitySettingInput is an autogenerated input type of UpdateEnterpriseActionExecutionCapabilitySetting.
+type UpdateEnterpriseActionExecutionCapabilitySettingInput struct {
+ // The ID of the enterprise on which to set the members can create repositories setting. (Required.)
+ EnterpriseID ID `json:"enterpriseId"`
+ // The value for the action execution capability setting on the enterprise. (Required.)
+ Capability ActionExecutionCapabilitySetting `json:"capability"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// UpdateEnterpriseAdministratorRoleInput is an autogenerated input type of UpdateEnterpriseAdministratorRole.
+type UpdateEnterpriseAdministratorRoleInput struct {
+ // The ID of the Enterprise which the admin belongs to. (Required.)
+ EnterpriseID ID `json:"enterpriseId"`
+ // The login of an administrator whose role is being changed. (Required.)
+ Login String `json:"login"`
+ // The new role for the Enterprise administrator. (Required.)
+ Role EnterpriseAdministratorRole `json:"role"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput is an autogenerated input type of UpdateEnterpriseAllowPrivateRepositoryForkingSetting.
+type UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput struct {
+ // The ID of the enterprise on which to set the allow private repository forking setting. (Required.)
+ EnterpriseID ID `json:"enterpriseId"`
+ // The value for the allow private repository forking setting on the enterprise. (Required.)
+ SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"`
+
+ // A unique identifier for the client performing the mutation. (Optional.)
+ ClientMutationID *String `json:"clientMutationId,omitempty"`
+}
+
+// UpdateEnterpriseDefaultRepositoryPermissionSettingInput is an autogenerated input type of UpdateEnterpriseDefaultRepositoryPermissionSetting.
+type UpdateEnterpriseDefaultRepositoryPermissionSettingInput struct {
+ // The ID of the enterprise on which to set the default repository permission setting. (Required.)
+ EnterpriseID ID `json:"enterpriseId"` + // The value for the default repository permission setting on the enterprise. (Required.) + SettingValue EnterpriseDefaultRepositoryPermissionSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput is an autogenerated input type of UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting. +type UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput struct { + // The ID of the enterprise on which to set the members can change repository visibility setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the members can change repository visibility setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseMembersCanCreateRepositoriesSettingInput is an autogenerated input type of UpdateEnterpriseMembersCanCreateRepositoriesSetting. +type UpdateEnterpriseMembersCanCreateRepositoriesSettingInput struct { + // The ID of the enterprise on which to set the members can create repositories setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + + // Value for the members can create repositories setting on the enterprise. This or the granular public/private/internal allowed fields (but not both) must be provided. (Optional.) + SettingValue *EnterpriseMembersCanCreateRepositoriesSettingValue `json:"settingValue,omitempty"` + // When false, allow member organizations to set their own repository creation member privileges. (Optional.) + MembersCanCreateRepositoriesPolicyEnabled *Boolean `json:"membersCanCreateRepositoriesPolicyEnabled,omitempty"` + // Allow members to create public repositories. Defaults to current value. (Optional.) + MembersCanCreatePublicRepositories *Boolean `json:"membersCanCreatePublicRepositories,omitempty"` + // Allow members to create private repositories. Defaults to current value. (Optional.) + MembersCanCreatePrivateRepositories *Boolean `json:"membersCanCreatePrivateRepositories,omitempty"` + // Allow members to create internal repositories. Defaults to current value. (Optional.) + MembersCanCreateInternalRepositories *Boolean `json:"membersCanCreateInternalRepositories,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseMembersCanDeleteIssuesSettingInput is an autogenerated input type of UpdateEnterpriseMembersCanDeleteIssuesSetting. +type UpdateEnterpriseMembersCanDeleteIssuesSettingInput struct { + // The ID of the enterprise on which to set the members can delete issues setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the members can delete issues setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput is an autogenerated input type of UpdateEnterpriseMembersCanDeleteRepositoriesSetting. 
+type UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput struct { + // The ID of the enterprise on which to set the members can delete repositories setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the members can delete repositories setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput is an autogenerated input type of UpdateEnterpriseMembersCanInviteCollaboratorsSetting. +type UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput struct { + // The ID of the enterprise on which to set the members can invite collaborators setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the members can invite collaborators setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseMembersCanMakePurchasesSettingInput is an autogenerated input type of UpdateEnterpriseMembersCanMakePurchasesSetting. +type UpdateEnterpriseMembersCanMakePurchasesSettingInput struct { + // The ID of the enterprise on which to set the members can make purchases setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the members can make purchases setting on the enterprise. (Required.) + SettingValue EnterpriseMembersCanMakePurchasesSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput is an autogenerated input type of UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting. +type UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput struct { + // The ID of the enterprise on which to set the members can update protected branches setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the members can update protected branches setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput is an autogenerated input type of UpdateEnterpriseMembersCanViewDependencyInsightsSetting. +type UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput struct { + // The ID of the enterprise on which to set the members can view dependency insights setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the members can view dependency insights setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseOrganizationProjectsSettingInput is an autogenerated input type of UpdateEnterpriseOrganizationProjectsSetting. 
+type UpdateEnterpriseOrganizationProjectsSettingInput struct { + // The ID of the enterprise on which to set the organization projects setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the organization projects setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseProfileInput is an autogenerated input type of UpdateEnterpriseProfile. +type UpdateEnterpriseProfileInput struct { + // The Enterprise ID to update. (Required.) + EnterpriseID ID `json:"enterpriseId"` + + // The name of the enterprise. (Optional.) + Name *String `json:"name,omitempty"` + // The description of the enterprise. (Optional.) + Description *String `json:"description,omitempty"` + // The URL of the enterprise's website. (Optional.) + WebsiteURL *String `json:"websiteUrl,omitempty"` + // The location of the enterprise. (Optional.) + Location *String `json:"location,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseRepositoryProjectsSettingInput is an autogenerated input type of UpdateEnterpriseRepositoryProjectsSetting. +type UpdateEnterpriseRepositoryProjectsSettingInput struct { + // The ID of the enterprise on which to set the repository projects setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the repository projects setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseTeamDiscussionsSettingInput is an autogenerated input type of UpdateEnterpriseTeamDiscussionsSetting. +type UpdateEnterpriseTeamDiscussionsSettingInput struct { + // The ID of the enterprise on which to set the team discussions setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the team discussions setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledDisabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput is an autogenerated input type of UpdateEnterpriseTwoFactorAuthenticationRequiredSetting. +type UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput struct { + // The ID of the enterprise on which to set the two factor authentication required setting. (Required.) + EnterpriseID ID `json:"enterpriseId"` + // The value for the two factor authentication required setting on the enterprise. (Required.) + SettingValue EnterpriseEnabledSettingValue `json:"settingValue"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateIssueCommentInput is an autogenerated input type of UpdateIssueComment. +type UpdateIssueCommentInput struct { + // The ID of the IssueComment to modify. (Required.) + ID ID `json:"id"` + // The updated text of the comment. (Required.) + Body String `json:"body"` + + // A unique identifier for the client performing the mutation. (Optional.) 
+ ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateIssueInput is an autogenerated input type of UpdateIssue. +type UpdateIssueInput struct { + // The ID of the Issue to modify. (Required.) + ID ID `json:"id"` + + // The title for the issue. (Optional.) + Title *String `json:"title,omitempty"` + // The body for the issue description. (Optional.) + Body *String `json:"body,omitempty"` + // An array of Node IDs of users for this issue. (Optional.) + AssigneeIDs *[]ID `json:"assigneeIds,omitempty"` + // The Node ID of the milestone for this issue. (Optional.) + MilestoneID *ID `json:"milestoneId,omitempty"` + // An array of Node IDs of labels for this issue. (Optional.) + LabelIDs *[]ID `json:"labelIds,omitempty"` + // The desired issue state. (Optional.) + State *IssueState `json:"state,omitempty"` + // An array of Node IDs for projects associated with this issue. (Optional.) + ProjectIDs *[]ID `json:"projectIds,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateProjectCardInput is an autogenerated input type of UpdateProjectCard. +type UpdateProjectCardInput struct { + // The ProjectCard ID to update. (Required.) + ProjectCardID ID `json:"projectCardId"` + + // Whether or not the ProjectCard should be archived. (Optional.) + IsArchived *Boolean `json:"isArchived,omitempty"` + // The note of ProjectCard. (Optional.) + Note *String `json:"note,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateProjectColumnInput is an autogenerated input type of UpdateProjectColumn. +type UpdateProjectColumnInput struct { + // The ProjectColumn ID to update. (Required.) + ProjectColumnID ID `json:"projectColumnId"` + // The name of project column. (Required.) + Name String `json:"name"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateProjectInput is an autogenerated input type of UpdateProject. +type UpdateProjectInput struct { + // The Project ID to update. (Required.) + ProjectID ID `json:"projectId"` + + // The name of project. (Optional.) + Name *String `json:"name,omitempty"` + // The description of project. (Optional.) + Body *String `json:"body,omitempty"` + // Whether the project is open or closed. (Optional.) + State *ProjectState `json:"state,omitempty"` + // Whether the project is public or not. (Optional.) + Public *Boolean `json:"public,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdatePullRequestInput is an autogenerated input type of UpdatePullRequest. +type UpdatePullRequestInput struct { + // The Node ID of the pull request. (Required.) + PullRequestID ID `json:"pullRequestId"` + + // The name of the branch you want your changes pulled into. This should be an existing branch on the current repository. (Optional.) + BaseRefName *String `json:"baseRefName,omitempty"` + // The title of the pull request. (Optional.) + Title *String `json:"title,omitempty"` + // The contents of the pull request. (Optional.) + Body *String `json:"body,omitempty"` + // The target state of the pull request. (Optional.) + State *PullRequestUpdateState `json:"state,omitempty"` + // Indicates whether maintainers can modify the pull request. 
(Optional.) + MaintainerCanModify *Boolean `json:"maintainerCanModify,omitempty"` + // An array of Node IDs of users for this pull request. (Optional.) + AssigneeIDs *[]ID `json:"assigneeIds,omitempty"` + // The Node ID of the milestone for this pull request. (Optional.) + MilestoneID *ID `json:"milestoneId,omitempty"` + // An array of Node IDs of labels for this pull request. (Optional.) + LabelIDs *[]ID `json:"labelIds,omitempty"` + // An array of Node IDs for projects associated with this pull request. (Optional.) + ProjectIDs *[]ID `json:"projectIds,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdatePullRequestReviewCommentInput is an autogenerated input type of UpdatePullRequestReviewComment. +type UpdatePullRequestReviewCommentInput struct { + // The Node ID of the comment to modify. (Required.) + PullRequestReviewCommentID ID `json:"pullRequestReviewCommentId"` + // The text of the comment. (Required.) + Body String `json:"body"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdatePullRequestReviewInput is an autogenerated input type of UpdatePullRequestReview. +type UpdatePullRequestReviewInput struct { + // The Node ID of the pull request review to modify. (Required.) + PullRequestReviewID ID `json:"pullRequestReviewId"` + // The contents of the pull request review body. (Required.) + Body String `json:"body"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateRefInput is an autogenerated input type of UpdateRef. +type UpdateRefInput struct { + // The Node ID of the Ref to be updated. (Required.) + RefID ID `json:"refId"` + // The GitObjectID that the Ref shall be updated to target. (Required.) + Oid GitObjectID `json:"oid"` + + // Permit updates of branch Refs that are not fast-forwards?. (Optional.) + Force *Boolean `json:"force,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateRepositoryInput is an autogenerated input type of UpdateRepository. +type UpdateRepositoryInput struct { + // The ID of the repository to update. (Required.) + RepositoryID ID `json:"repositoryId"` + + // The new name of the repository. (Optional.) + Name *String `json:"name,omitempty"` + // A new description for the repository. Pass an empty string to erase the existing description. (Optional.) + Description *String `json:"description,omitempty"` + // Whether this repository should be marked as a template such that anyone who can access it can create new repositories with the same files and directory structure. (Optional.) + Template *Boolean `json:"template,omitempty"` + // The URL for a web page about this repository. Pass an empty string to erase the existing URL. (Optional.) + HomepageURL *URI `json:"homepageUrl,omitempty"` + // Indicates if the repository should have the wiki feature enabled. (Optional.) + HasWikiEnabled *Boolean `json:"hasWikiEnabled,omitempty"` + // Indicates if the repository should have the issues feature enabled. (Optional.) + HasIssuesEnabled *Boolean `json:"hasIssuesEnabled,omitempty"` + // Indicates if the repository should have the project boards feature enabled. (Optional.) 
+ HasProjectsEnabled *Boolean `json:"hasProjectsEnabled,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateSubscriptionInput is an autogenerated input type of UpdateSubscription. +type UpdateSubscriptionInput struct { + // The Node ID of the subscribable object to modify. (Required.) + SubscribableID ID `json:"subscribableId"` + // The new state of the subscription. (Required.) + State SubscriptionState `json:"state"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateTeamDiscussionCommentInput is an autogenerated input type of UpdateTeamDiscussionComment. +type UpdateTeamDiscussionCommentInput struct { + // The ID of the comment to modify. (Required.) + ID ID `json:"id"` + // The updated text of the comment. (Required.) + Body String `json:"body"` + + // The current version of the body content. (Optional.) + BodyVersion *String `json:"bodyVersion,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateTeamDiscussionInput is an autogenerated input type of UpdateTeamDiscussion. +type UpdateTeamDiscussionInput struct { + // The Node ID of the discussion to modify. (Required.) + ID ID `json:"id"` + + // The updated title of the discussion. (Optional.) + Title *String `json:"title,omitempty"` + // The updated text of the discussion. (Optional.) + Body *String `json:"body,omitempty"` + // The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server. (Optional.) + BodyVersion *String `json:"bodyVersion,omitempty"` + // If provided, sets the pinned state of the updated discussion. (Optional.) + Pinned *Boolean `json:"pinned,omitempty"` + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UpdateTopicsInput is an autogenerated input type of UpdateTopics. +type UpdateTopicsInput struct { + // The Node ID of the repository. (Required.) + RepositoryID ID `json:"repositoryId"` + // An array of topic names. (Required.) + TopicNames []String `json:"topicNames"` + + // A unique identifier for the client performing the mutation. (Optional.) + ClientMutationID *String `json:"clientMutationId,omitempty"` +} + +// UserStatusOrder represents ordering options for user status connections. +type UserStatusOrder struct { + // The field to order user statuses by. (Required.) + Field UserStatusOrderField `json:"field"` + // The ordering direction. (Required.) + Direction OrderDirection `json:"direction"` +} diff --git a/vendor/github.com/shurcooL/githubv4/scalar.go b/vendor/github.com/shurcooL/githubv4/scalar.go new file mode 100644 index 0000000000..56a395303f --- /dev/null +++ b/vendor/github.com/shurcooL/githubv4/scalar.go @@ -0,0 +1,139 @@ +package githubv4 + +import ( + "crypto/x509" + "encoding/json" + "fmt" + "net/url" + "time" + + "github.com/shurcooL/graphql" +) + +// Note: These custom types are meant to be used in queries for now. +// But the plan is to switch to using native Go types (string, int, bool, time.Time, etc.). +// See https://github.com/shurcooL/githubv4/issues/9 for details. 
+// +// These custom types currently provide documentation, and their use +// is required for sending outbound queries. However, native Go types +// can be used for unmarshaling. Once https://github.com/shurcooL/githubv4/issues/9 +// is resolved, native Go types can completely replace these. + +type ( + // Boolean represents true or false values. + Boolean graphql.Boolean + + // Date is an ISO-8601 encoded date. + Date struct{ time.Time } + + // DateTime is an ISO-8601 encoded UTC date. + DateTime struct{ time.Time } + + // Float represents signed double-precision fractional values as + // specified by IEEE 754. + Float graphql.Float + + // GitObjectID is a Git object ID. For example, + // "912ec1990bd09f8fc128c3fa6b59105085aabc03". + GitObjectID string + + // GitTimestamp is an ISO-8601 encoded date. + // Unlike the DateTime type, GitTimestamp is not converted in UTC. + GitTimestamp struct{ time.Time } + + // HTML is a string containing HTML code. + HTML string + + // ID represents a unique identifier that is Base64 obfuscated. It + // is often used to refetch an object or as key for a cache. The ID + // type appears in a JSON response as a String; however, it is not + // intended to be human-readable. When expected as an input type, + // any string (such as "VXNlci0xMA==") or integer (such as 4) input + // value will be accepted as an ID. + ID graphql.ID + + // Int represents non-fractional signed whole numeric values. + // Int can represent values between -(2^31) and 2^31 - 1. + Int graphql.Int + + // String represents textual data as UTF-8 character sequences. + // This type is most often used by GraphQL to represent free-form + // human-readable text. + String graphql.String + + // URI is an RFC 3986, RFC 3987, and RFC 6570 (level 4) compliant URI. + URI struct{ *url.URL } + + // X509Certificate is a valid x509 certificate. + X509Certificate struct{ *x509.Certificate } +) + +// MarshalJSON implements the json.Marshaler interface. +// The URI is a quoted string. +func (u URI) MarshalJSON() ([]byte, error) { + return json.Marshal(u.String()) +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +// The URI is expected to be a quoted string. +func (u *URI) UnmarshalJSON(data []byte) error { + // Ignore null, like in the main JSON package. + if string(data) == "null" { + return nil + } + var s string + err := json.Unmarshal(data, &s) + if err != nil { + return err + } + u.URL, err = url.Parse(s) + return err +} + +// MarshalJSON implements the json.Marshaler interface. +func (x X509Certificate) MarshalJSON() ([]byte, error) { + // TODO: Implement. + return nil, fmt.Errorf("X509Certificate.MarshalJSON: not implemented") +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +func (x *X509Certificate) UnmarshalJSON(data []byte) error { + // TODO: Implement. + return fmt.Errorf("X509Certificate.UnmarshalJSON: not implemented") +} + +// NewBoolean is a helper to make a new *Boolean. +func NewBoolean(v Boolean) *Boolean { return &v } + +// NewDate is a helper to make a new *Date. +func NewDate(v Date) *Date { return &v } + +// NewDateTime is a helper to make a new *DateTime. +func NewDateTime(v DateTime) *DateTime { return &v } + +// NewFloat is a helper to make a new *Float. +func NewFloat(v Float) *Float { return &v } + +// NewGitObjectID is a helper to make a new *GitObjectID. +func NewGitObjectID(v GitObjectID) *GitObjectID { return &v } + +// NewGitTimestamp is a helper to make a new *GitTimestamp. 
+func NewGitTimestamp(v GitTimestamp) *GitTimestamp { return &v } + +// NewHTML is a helper to make a new *HTML. +func NewHTML(v HTML) *HTML { return &v } + +// NewID is a helper to make a new *ID. +func NewID(v ID) *ID { return &v } + +// NewInt is a helper to make a new *Int. +func NewInt(v Int) *Int { return &v } + +// NewString is a helper to make a new *String. +func NewString(v String) *String { return &v } + +// NewURI is a helper to make a new *URI. +func NewURI(v URI) *URI { return &v } + +// NewX509Certificate is a helper to make a new *X509Certificate. +func NewX509Certificate(v X509Certificate) *X509Certificate { return &v } diff --git a/vendor/github.com/shurcooL/graphql/.travis.yml b/vendor/github.com/shurcooL/graphql/.travis.yml new file mode 100644 index 0000000000..93b1fcdb31 --- /dev/null +++ b/vendor/github.com/shurcooL/graphql/.travis.yml @@ -0,0 +1,16 @@ +sudo: false +language: go +go: + - 1.x + - master +matrix: + allow_failures: + - go: master + fast_finish: true +install: + - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d -s .) + - go tool vet . + - go test -v -race ./... diff --git a/vendor/github.com/shurcooL/graphql/LICENSE b/vendor/github.com/shurcooL/graphql/LICENSE new file mode 100644 index 0000000000..ca4c77642d --- /dev/null +++ b/vendor/github.com/shurcooL/graphql/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Dmitri Shuralyov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/shurcooL/graphql/README.md b/vendor/github.com/shurcooL/graphql/README.md new file mode 100644 index 0000000000..6fa03bd4cd --- /dev/null +++ b/vendor/github.com/shurcooL/graphql/README.md @@ -0,0 +1,293 @@ +graphql +======= + +[![Build Status](https://travis-ci.org/shurcooL/graphql.svg?branch=master)](https://travis-ci.org/shurcooL/graphql) [![GoDoc](https://godoc.org/github.com/shurcooL/graphql?status.svg)](https://godoc.org/github.com/shurcooL/graphql) + +Package `graphql` provides a GraphQL client implementation. + +For more information, see package [`github.com/shurcooL/githubv4`](https://github.com/shurcooL/githubv4), which is a specialized version targeting GitHub GraphQL API v4. That package is driving the feature development. + +**Status:** In active early research and development. The API will change when opportunities for improvement are discovered; it is not yet frozen. 
+ +Installation +------------ + +`graphql` requires Go version 1.8 or later. + +```bash +go get -u github.com/shurcooL/graphql +``` + +Usage +----- + +Construct a GraphQL client, specifying the GraphQL server URL. Then, you can use it to make GraphQL queries and mutations. + +```Go +client := graphql.NewClient("https://example.com/graphql", nil) +// Use client... +``` + +### Authentication + +Some GraphQL servers may require authentication. The `graphql` package does not directly handle authentication. Instead, when creating a new client, you're expected to pass an `http.Client` that performs authentication. The easiest and recommended way to do this is to use the [`golang.org/x/oauth2`](https://golang.org/x/oauth2) package. You'll need an OAuth token with the right scopes. Then: + +```Go +import "golang.org/x/oauth2" + +func main() { + src := oauth2.StaticTokenSource( + &oauth2.Token{AccessToken: os.Getenv("GRAPHQL_TOKEN")}, + ) + httpClient := oauth2.NewClient(context.Background(), src) + + client := graphql.NewClient("https://example.com/graphql", httpClient) + // Use client... +``` + +### Simple Query + +To make a GraphQL query, you need to define a corresponding Go type. + +For example, to make the following GraphQL query: + +```GraphQL +query { + me { + name + } +} +``` + +You can define this variable: + +```Go +var query struct { + Me struct { + Name graphql.String + } +} +``` + +Then call `client.Query`, passing a pointer to it: + +```Go +err := client.Query(context.Background(), &query, nil) +if err != nil { + // Handle error. +} +fmt.Println(query.Me.Name) + +// Output: Luke Skywalker +``` + +### Arguments and Variables + +Often, you'll want to specify arguments on some fields. You can use the `graphql` struct field tag for this. + +For example, to make the following GraphQL query: + +```GraphQL +{ + human(id: "1000") { + name + height(unit: METER) + } +} +``` + +You can define this variable: + +```Go +var q struct { + Human struct { + Name graphql.String + Height graphql.Float `graphql:"height(unit: METER)"` + } `graphql:"human(id: \"1000\")"` +} +``` + +Then call `client.Query`: + +```Go +err := client.Query(context.Background(), &q, nil) +if err != nil { + // Handle error. +} +fmt.Println(q.Human.Name) +fmt.Println(q.Human.Height) + +// Output: +// Luke Skywalker +// 1.72 +``` + +However, that'll only work if the arguments are constant and known in advance. Otherwise, you will need to make use of variables. Replace the constants in the struct field tag with variable names: + +```Go +var q struct { + Human struct { + Name graphql.String + Height graphql.Float `graphql:"height(unit: $unit)"` + } `graphql:"human(id: $id)"` +} +``` + +Then, define a `variables` map with their values: + +```Go +variables := map[string]interface{}{ + "id": graphql.ID(id), + "unit": starwars.LengthUnit("METER"), +} +``` + +Finally, call `client.Query` providing `variables`: + +```Go +err := client.Query(context.Background(), &q, variables) +if err != nil { + // Handle error. +} +``` + +### Inline Fragments + +Some GraphQL queries contain inline fragments. You can use the `graphql` struct field tag to express them. + +For example, to make the following GraphQL query: + +```GraphQL +{ + hero(episode: "JEDI") { + name + ... on Droid { + primaryFunction + } + ... on Human { + height + } + } +} +``` + +You can define this variable: + +```Go +var q struct { + Hero struct { + Name graphql.String + Droid struct { + PrimaryFunction graphql.String + } `graphql:"... 
on Droid"` + Human struct { + Height graphql.Float + } `graphql:"... on Human"` + } `graphql:"hero(episode: \"JEDI\")"` +} +``` + +Alternatively, you can define the struct types corresponding to inline fragments, and use them as embedded fields in your query: + +```Go +type ( + DroidFragment struct { + PrimaryFunction graphql.String + } + HumanFragment struct { + Height graphql.Float + } +) + +var q struct { + Hero struct { + Name graphql.String + DroidFragment `graphql:"... on Droid"` + HumanFragment `graphql:"... on Human"` + } `graphql:"hero(episode: \"JEDI\")"` +} +``` + +Then call `client.Query`: + +```Go +err := client.Query(context.Background(), &q, nil) +if err != nil { + // Handle error. +} +fmt.Println(q.Hero.Name) +fmt.Println(q.Hero.PrimaryFunction) +fmt.Println(q.Hero.Height) + +// Output: +// R2-D2 +// Astromech +// 0 +``` + +### Mutations + +Mutations often require information that you can only find out by performing a query first. Let's suppose you've already done that. + +For example, to make the following GraphQL mutation: + +```GraphQL +mutation($ep: Episode!, $review: ReviewInput!) { + createReview(episode: $ep, review: $review) { + stars + commentary + } +} +variables { + "ep": "JEDI", + "review": { + "stars": 5, + "commentary": "This is a great movie!" + } +} +``` + +You can define: + +```Go +var m struct { + CreateReview struct { + Stars graphql.Int + Commentary graphql.String + } `graphql:"createReview(episode: $ep, review: $review)"` +} +variables := map[string]interface{}{ + "ep": starwars.Episode("JEDI"), + "review": starwars.ReviewInput{ + Stars: graphql.Int(5), + Commentary: graphql.String("This is a great movie!"), + }, +} +``` + +Then call `client.Mutate`: + +```Go +err := client.Mutate(context.Background(), &m, variables) +if err != nil { + // Handle error. +} +fmt.Printf("Created a %v star review: %v\n", m.CreateReview.Stars, m.CreateReview.Commentary) + +// Output: +// Created a 5 star review: This is a great movie! +``` + +Directories +----------- + +| Path | Synopsis | +|----------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------| +| [example/graphqldev](https://godoc.org/github.com/shurcooL/graphql/example/graphqldev) | graphqldev is a test program currently being used for developing graphql package. | +| [ident](https://godoc.org/github.com/shurcooL/graphql/ident) | Package ident provides functions for parsing and converting identifier names between various naming convention. | +| [internal/jsonutil](https://godoc.org/github.com/shurcooL/graphql/internal/jsonutil) | Package jsonutil provides a function for decoding JSON into a GraphQL query data structure. | + +License +------- + +- [MIT License](LICENSE) diff --git a/vendor/github.com/shurcooL/graphql/doc.go b/vendor/github.com/shurcooL/graphql/doc.go new file mode 100644 index 0000000000..69ec4e0387 --- /dev/null +++ b/vendor/github.com/shurcooL/graphql/doc.go @@ -0,0 +1,11 @@ +// Package graphql provides a GraphQL client implementation. +// +// For more information, see package github.com/shurcooL/githubv4, +// which is a specialized version targeting GitHub GraphQL API v4. +// That package is driving the feature development. +// +// Status: In active early research and development. The API will change when +// opportunities for improvement are discovered; it is not yet frozen. +// +// For now, see README for more details. 
+package graphql // import "github.com/shurcooL/graphql" diff --git a/vendor/github.com/shurcooL/graphql/graphql.go b/vendor/github.com/shurcooL/graphql/graphql.go new file mode 100644 index 0000000000..85209562c6 --- /dev/null +++ b/vendor/github.com/shurcooL/graphql/graphql.go @@ -0,0 +1,123 @@ +package graphql + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + + "github.com/shurcooL/graphql/internal/jsonutil" + "golang.org/x/net/context/ctxhttp" +) + +// Client is a GraphQL client. +type Client struct { + url string // GraphQL server URL. + httpClient *http.Client +} + +// NewClient creates a GraphQL client targeting the specified GraphQL server URL. +// If httpClient is nil, then http.DefaultClient is used. +func NewClient(url string, httpClient *http.Client) *Client { + if httpClient == nil { + httpClient = http.DefaultClient + } + return &Client{ + url: url, + httpClient: httpClient, + } +} + +// Query executes a single GraphQL query request, +// with a query derived from q, populating the response into it. +// q should be a pointer to struct that corresponds to the GraphQL schema. +func (c *Client) Query(ctx context.Context, q interface{}, variables map[string]interface{}) error { + return c.do(ctx, queryOperation, q, variables) +} + +// Mutate executes a single GraphQL mutation request, +// with a mutation derived from m, populating the response into it. +// m should be a pointer to struct that corresponds to the GraphQL schema. +func (c *Client) Mutate(ctx context.Context, m interface{}, variables map[string]interface{}) error { + return c.do(ctx, mutationOperation, m, variables) +} + +// do executes a single GraphQL operation. +func (c *Client) do(ctx context.Context, op operationType, v interface{}, variables map[string]interface{}) error { + var query string + switch op { + case queryOperation: + query = constructQuery(v, variables) + case mutationOperation: + query = constructMutation(v, variables) + } + in := struct { + Query string `json:"query"` + Variables map[string]interface{} `json:"variables,omitempty"` + }{ + Query: query, + Variables: variables, + } + var buf bytes.Buffer + err := json.NewEncoder(&buf).Encode(in) + if err != nil { + return err + } + resp, err := ctxhttp.Post(ctx, c.httpClient, c.url, "application/json", &buf) + if err != nil { + return err + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + body, _ := ioutil.ReadAll(resp.Body) + return fmt.Errorf("non-200 OK status code: %v body: %q", resp.Status, body) + } + var out struct { + Data *json.RawMessage + Errors errors + //Extensions interface{} // Unused. + } + err = json.NewDecoder(resp.Body).Decode(&out) + if err != nil { + // TODO: Consider including response body in returned error, if deemed helpful. + return err + } + if out.Data != nil { + err := jsonutil.UnmarshalGraphQL(*out.Data, v) + if err != nil { + // TODO: Consider including response body in returned error, if deemed helpful. + return err + } + } + if len(out.Errors) > 0 { + return out.Errors + } + return nil +} + +// errors represents the "errors" array in a response from a GraphQL server. +// If returned via error interface, the slice is expected to contain at least 1 element. +// +// Specification: https://facebook.github.io/graphql/#sec-Errors. +type errors []struct { + Message string + Locations []struct { + Line int + Column int + } +} + +// Error implements error interface. 
+func (e errors) Error() string { + return e[0].Message +} + +type operationType uint8 + +const ( + queryOperation operationType = iota + mutationOperation + //subscriptionOperation // Unused. +) diff --git a/vendor/github.com/shurcooL/graphql/ident/ident.go b/vendor/github.com/shurcooL/graphql/ident/ident.go new file mode 100644 index 0000000000..29e498ed9e --- /dev/null +++ b/vendor/github.com/shurcooL/graphql/ident/ident.go @@ -0,0 +1,240 @@ +// Package ident provides functions for parsing and converting identifier names +// between various naming convention. It has support for MixedCaps, lowerCamelCase, +// and SCREAMING_SNAKE_CASE naming conventions. +package ident + +import ( + "strings" + "unicode" + "unicode/utf8" +) + +// ParseMixedCaps parses a MixedCaps identifier name. +// +// E.g., "ClientMutationID" -> {"Client", "Mutation", "ID"}. +func ParseMixedCaps(name string) Name { + var words Name + + // Split name at any lower -> Upper or Upper -> Upper,lower transitions. + // Check each word for initialisms. + runes := []rune(name) + w, i := 0, 0 // Index of start of word, scan. + for i+1 <= len(runes) { + eow := false // Whether we hit the end of a word. + if i+1 == len(runes) { + eow = true + } else if unicode.IsLower(runes[i]) && unicode.IsUpper(runes[i+1]) { + // lower -> Upper. + eow = true + } else if i+2 < len(runes) && unicode.IsUpper(runes[i]) && unicode.IsUpper(runes[i+1]) && unicode.IsLower(runes[i+2]) { + // Upper -> Upper,lower. End of acronym, followed by a word. + eow = true + + if string(runes[i:i+3]) == "IDs" { // Special case, plural form of ID initialism. + eow = false + } + } + i++ + if !eow { + continue + } + + // [w, i) is a word. + word := string(runes[w:i]) + if initialism, ok := isInitialism(word); ok { + words = append(words, initialism) + } else if i1, i2, ok := isTwoInitialisms(word); ok { + words = append(words, i1, i2) + } else { + words = append(words, word) + } + w = i + } + return words +} + +// ParseLowerCamelCase parses a lowerCamelCase identifier name. +// +// E.g., "clientMutationId" -> {"client", "Mutation", "Id"}. +func ParseLowerCamelCase(name string) Name { + var words Name + + // Split name at any Upper letters. + runes := []rune(name) + w, i := 0, 0 // Index of start of word, scan. + for i+1 <= len(runes) { + eow := false // Whether we hit the end of a word. + if i+1 == len(runes) { + eow = true + } else if unicode.IsUpper(runes[i+1]) { + // Upper letter. + eow = true + } + i++ + if !eow { + continue + } + + // [w, i) is a word. + words = append(words, string(runes[w:i])) + w = i + } + return words +} + +// ParseScreamingSnakeCase parses a SCREAMING_SNAKE_CASE identifier name. +// +// E.g., "CLIENT_MUTATION_ID" -> {"CLIENT", "MUTATION", "ID"}. +func ParseScreamingSnakeCase(name string) Name { + var words Name + + // Split name at '_' characters. + runes := []rune(name) + w, i := 0, 0 // Index of start of word, scan. + for i+1 <= len(runes) { + eow := false // Whether we hit the end of a word. + if i+1 == len(runes) { + eow = true + } else if runes[i+1] == '_' { + // Underscore. + eow = true + } + i++ + if !eow { + continue + } + + // [w, i) is a word. + words = append(words, string(runes[w:i])) + if i < len(runes) && runes[i] == '_' { + // Skip underscore. + i++ + } + w = i + } + return words +} + +// Name is an identifier name, broken up into individual words. +type Name []string + +// ToMixedCaps expresses identifer name in MixedCaps naming convention. +// +// E.g., "ClientMutationID". 
+func (n Name) ToMixedCaps() string { + for i, word := range n { + if strings.EqualFold(word, "IDs") { // Special case, plural form of ID initialism. + n[i] = "IDs" + continue + } + if initialism, ok := isInitialism(word); ok { + n[i] = initialism + continue + } + if brand, ok := isBrand(word); ok { + n[i] = brand + continue + } + r, size := utf8.DecodeRuneInString(word) + n[i] = string(unicode.ToUpper(r)) + strings.ToLower(word[size:]) + } + return strings.Join(n, "") +} + +// ToLowerCamelCase expresses identifer name in lowerCamelCase naming convention. +// +// E.g., "clientMutationId". +func (n Name) ToLowerCamelCase() string { + for i, word := range n { + if i == 0 { + n[i] = strings.ToLower(word) + continue + } + r, size := utf8.DecodeRuneInString(word) + n[i] = string(unicode.ToUpper(r)) + strings.ToLower(word[size:]) + } + return strings.Join(n, "") +} + +// isInitialism reports whether word is an initialism. +func isInitialism(word string) (string, bool) { + initialism := strings.ToUpper(word) + _, ok := initialisms[initialism] + return initialism, ok +} + +// isTwoInitialisms reports whether word is two initialisms. +func isTwoInitialisms(word string) (string, string, bool) { + word = strings.ToUpper(word) + for i := 2; i <= len(word)-2; i++ { // Shortest initialism is 2 characters long. + _, ok1 := initialisms[word[:i]] + _, ok2 := initialisms[word[i:]] + if ok1 && ok2 { + return word[:i], word[i:], true + } + } + return "", "", false +} + +// initialisms is the set of initialisms in the MixedCaps naming convention. +// Only add entries that are highly unlikely to be non-initialisms. +// For instance, "ID" is fine (Freudian code is rare), but "AND" is not. +var initialisms = map[string]struct{}{ + // These are the common initialisms from golint. Keep them in sync + // with https://gotools.org/github.com/golang/lint#commonInitialisms. + "ACL": {}, + "API": {}, + "ASCII": {}, + "CPU": {}, + "CSS": {}, + "DNS": {}, + "EOF": {}, + "GUID": {}, + "HTML": {}, + "HTTP": {}, + "HTTPS": {}, + "ID": {}, + "IP": {}, + "JSON": {}, + "LHS": {}, + "QPS": {}, + "RAM": {}, + "RHS": {}, + "RPC": {}, + "SLA": {}, + "SMTP": {}, + "SQL": {}, + "SSH": {}, + "TCP": {}, + "TLS": {}, + "TTL": {}, + "UDP": {}, + "UI": {}, + "UID": {}, + "UUID": {}, + "URI": {}, + "URL": {}, + "UTF8": {}, + "VM": {}, + "XML": {}, + "XMPP": {}, + "XSRF": {}, + "XSS": {}, + + // Additional common initialisms. + "RSS": {}, +} + +// isBrand reports whether word is a brand. +func isBrand(word string) (string, bool) { + brand, ok := brands[strings.ToLower(word)] + return brand, ok +} + +// brands is the map of brands in the MixedCaps naming convention; +// see https://dmitri.shuralyov.com/idiomatic-go#for-brands-or-words-with-more-than-1-capital-letter-lowercase-all-letters. +// Key is the lower case version of the brand, value is the canonical brand spelling. +// Only add entries that are highly unlikely to be non-brands. +var brands = map[string]string{ + "github": "GitHub", +} diff --git a/vendor/github.com/shurcooL/graphql/internal/jsonutil/graphql.go b/vendor/github.com/shurcooL/graphql/internal/jsonutil/graphql.go new file mode 100644 index 0000000000..15bae246fd --- /dev/null +++ b/vendor/github.com/shurcooL/graphql/internal/jsonutil/graphql.go @@ -0,0 +1,311 @@ +// Package jsonutil provides a function for decoding JSON +// into a GraphQL query data structure. 
+package jsonutil + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "reflect" + "strings" +) + +// UnmarshalGraphQL parses the JSON-encoded GraphQL response data and stores +// the result in the GraphQL query data structure pointed to by v. +// +// The implementation is created on top of the JSON tokenizer available +// in "encoding/json".Decoder. +func UnmarshalGraphQL(data []byte, v interface{}) error { + dec := json.NewDecoder(bytes.NewReader(data)) + dec.UseNumber() + err := (&decoder{tokenizer: dec}).Decode(v) + if err != nil { + return err + } + tok, err := dec.Token() + switch err { + case io.EOF: + // Expect to get io.EOF. There shouldn't be any more + // tokens left after we've decoded v successfully. + return nil + case nil: + return fmt.Errorf("invalid token '%v' after top-level value", tok) + default: + return err + } +} + +// decoder is a JSON decoder that performs custom unmarshaling behavior +// for GraphQL query data structures. It's implemented on top of a JSON tokenizer. +type decoder struct { + tokenizer interface { + Token() (json.Token, error) + } + + // Stack of what part of input JSON we're in the middle of - objects, arrays. + parseState []json.Delim + + // Stacks of values where to unmarshal. + // The top of each stack is the reflect.Value where to unmarshal next JSON value. + // + // The reason there's more than one stack is because we might be unmarshaling + // a single JSON value into multiple GraphQL fragments or embedded structs, so + // we keep track of them all. + vs [][]reflect.Value +} + +// Decode decodes a single JSON value from d.tokenizer into v. +func (d *decoder) Decode(v interface{}) error { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Ptr { + return fmt.Errorf("cannot decode into non-pointer %T", v) + } + d.vs = [][]reflect.Value{{rv.Elem()}} + return d.decode() +} + +// decode decodes a single JSON value from d.tokenizer into d.vs. +func (d *decoder) decode() error { + // The loop invariant is that the top of each d.vs stack + // is where we try to unmarshal the next JSON value we see. + for len(d.vs) > 0 { + tok, err := d.tokenizer.Token() + if err == io.EOF { + return errors.New("unexpected end of JSON input") + } else if err != nil { + return err + } + + switch { + + // Are we inside an object and seeing next key (rather than end of object)? + case d.state() == '{' && tok != json.Delim('}'): + key, ok := tok.(string) + if !ok { + return errors.New("unexpected non-key in JSON input") + } + someFieldExist := false + for i := range d.vs { + v := d.vs[i][len(d.vs[i])-1] + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + var f reflect.Value + if v.Kind() == reflect.Struct { + f = fieldByGraphQLName(v, key) + if f.IsValid() { + someFieldExist = true + } + } + d.vs[i] = append(d.vs[i], f) + } + if !someFieldExist { + return fmt.Errorf("struct field for %q doesn't exist in any of %v places to unmarshal", key, len(d.vs)) + } + + // We've just consumed the current token, which was the key. + // Read the next token, which should be the value, and let the rest of code process it. + tok, err = d.tokenizer.Token() + if err == io.EOF { + return errors.New("unexpected end of JSON input") + } else if err != nil { + return err + } + + // Are we inside an array and seeing next value (rather than end of array)? 
+ case d.state() == '[' && tok != json.Delim(']'): + someSliceExist := false + for i := range d.vs { + v := d.vs[i][len(d.vs[i])-1] + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + var f reflect.Value + if v.Kind() == reflect.Slice { + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) // v = append(v, T). + f = v.Index(v.Len() - 1) + someSliceExist = true + } + d.vs[i] = append(d.vs[i], f) + } + if !someSliceExist { + return fmt.Errorf("slice doesn't exist in any of %v places to unmarshal", len(d.vs)) + } + } + + switch tok := tok.(type) { + case string, json.Number, bool, nil: + // Value. + + for i := range d.vs { + v := d.vs[i][len(d.vs[i])-1] + if !v.IsValid() { + continue + } + err := unmarshalValue(tok, v) + if err != nil { + return err + } + } + d.popAllVs() + + case json.Delim: + switch tok { + case '{': + // Start of object. + + d.pushState(tok) + + frontier := make([]reflect.Value, len(d.vs)) // Places to look for GraphQL fragments/embedded structs. + for i := range d.vs { + v := d.vs[i][len(d.vs[i])-1] + frontier[i] = v + // TODO: Do this recursively or not? Add a test case if needed. + if v.Kind() == reflect.Ptr && v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) // v = new(T). + } + } + // Find GraphQL fragments/embedded structs recursively, adding to frontier + // as new ones are discovered and exploring them further. + for len(frontier) > 0 { + v := frontier[0] + frontier = frontier[1:] + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + if v.Kind() != reflect.Struct { + continue + } + for i := 0; i < v.NumField(); i++ { + if isGraphQLFragment(v.Type().Field(i)) || v.Type().Field(i).Anonymous { + // Add GraphQL fragment or embedded struct. + d.vs = append(d.vs, []reflect.Value{v.Field(i)}) + frontier = append(frontier, v.Field(i)) + } + } + } + case '[': + // Start of array. + + d.pushState(tok) + + for i := range d.vs { + v := d.vs[i][len(d.vs[i])-1] + // TODO: Confirm this is needed, write a test case. + //if v.Kind() == reflect.Ptr && v.IsNil() { + // v.Set(reflect.New(v.Type().Elem())) // v = new(T). + //} + + // Reset slice to empty (in case it had non-zero initial value). + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + if v.Kind() != reflect.Slice { + continue + } + v.Set(reflect.MakeSlice(v.Type(), 0, 0)) // v = make(T, 0, 0). + } + case '}', ']': + // End of object or array. + d.popAllVs() + d.popState() + default: + return errors.New("unexpected delimiter in JSON input") + } + default: + return errors.New("unexpected token in JSON input") + } + } + return nil +} + +// pushState pushes a new parse state s onto the stack. +func (d *decoder) pushState(s json.Delim) { + d.parseState = append(d.parseState, s) +} + +// popState pops a parse state (already obtained) off the stack. +// The stack must be non-empty. +func (d *decoder) popState() { + d.parseState = d.parseState[:len(d.parseState)-1] +} + +// state reports the parse state on top of stack, or 0 if empty. +func (d *decoder) state() json.Delim { + if len(d.parseState) == 0 { + return 0 + } + return d.parseState[len(d.parseState)-1] +} + +// popAllVs pops from all d.vs stacks, keeping only non-empty ones. +func (d *decoder) popAllVs() { + var nonEmpty [][]reflect.Value + for i := range d.vs { + d.vs[i] = d.vs[i][:len(d.vs[i])-1] + if len(d.vs[i]) > 0 { + nonEmpty = append(nonEmpty, d.vs[i]) + } + } + d.vs = nonEmpty +} + +// fieldByGraphQLName returns an exported struct field of struct v +// that matches GraphQL name, or invalid reflect.Value if none found. 
+func fieldByGraphQLName(v reflect.Value, name string) reflect.Value { + for i := 0; i < v.NumField(); i++ { + if v.Type().Field(i).PkgPath != "" { + // Skip unexported field. + continue + } + if hasGraphQLName(v.Type().Field(i), name) { + return v.Field(i) + } + } + return reflect.Value{} +} + +// hasGraphQLName reports whether struct field f has GraphQL name. +func hasGraphQLName(f reflect.StructField, name string) bool { + value, ok := f.Tag.Lookup("graphql") + if !ok { + // TODO: caseconv package is relatively slow. Optimize it, then consider using it here. + //return caseconv.MixedCapsToLowerCamelCase(f.Name) == name + return strings.EqualFold(f.Name, name) + } + value = strings.TrimSpace(value) // TODO: Parse better. + if strings.HasPrefix(value, "...") { + // GraphQL fragment. It doesn't have a name. + return false + } + if i := strings.Index(value, "("); i != -1 { + value = value[:i] + } + if i := strings.Index(value, ":"); i != -1 { + value = value[:i] + } + return strings.TrimSpace(value) == name +} + +// isGraphQLFragment reports whether struct field f is a GraphQL fragment. +func isGraphQLFragment(f reflect.StructField) bool { + value, ok := f.Tag.Lookup("graphql") + if !ok { + return false + } + value = strings.TrimSpace(value) // TODO: Parse better. + return strings.HasPrefix(value, "...") +} + +// unmarshalValue unmarshals JSON value into v. +// v must be addressable and not obtained by the use of unexported +// struct fields, otherwise unmarshalValue will panic. +func unmarshalValue(value json.Token, v reflect.Value) error { + b, err := json.Marshal(value) // TODO: Short-circuit (if profiling says it's worth it). + if err != nil { + return err + } + return json.Unmarshal(b, v.Addr().Interface()) +} diff --git a/vendor/github.com/shurcooL/graphql/query.go b/vendor/github.com/shurcooL/graphql/query.go new file mode 100644 index 0000000000..e10b77189b --- /dev/null +++ b/vendor/github.com/shurcooL/graphql/query.go @@ -0,0 +1,131 @@ +package graphql + +import ( + "bytes" + "encoding/json" + "io" + "reflect" + "sort" + + "github.com/shurcooL/graphql/ident" +) + +func constructQuery(v interface{}, variables map[string]interface{}) string { + query := query(v) + if len(variables) > 0 { + return "query(" + queryArguments(variables) + ")" + query + } + return query +} + +func constructMutation(v interface{}, variables map[string]interface{}) string { + query := query(v) + if len(variables) > 0 { + return "mutation(" + queryArguments(variables) + ")" + query + } + return "mutation" + query +} + +// queryArguments constructs a minified arguments string for variables. +// +// E.g., map[string]interface{}{"a": Int(123), "b": NewBoolean(true)} -> "$a:Int!$b:Boolean". +func queryArguments(variables map[string]interface{}) string { + // Sort keys in order to produce deterministic output for testing purposes. + // TODO: If tests can be made to work with non-deterministic output, then no need to sort. + keys := make([]string, 0, len(variables)) + for k := range variables { + keys = append(keys, k) + } + sort.Strings(keys) + + var buf bytes.Buffer + for _, k := range keys { + io.WriteString(&buf, "$") + io.WriteString(&buf, k) + io.WriteString(&buf, ":") + writeArgumentType(&buf, reflect.TypeOf(variables[k]), true) + // Don't insert a comma here. + // Commas in GraphQL are insignificant, and we want minified output. + // See https://facebook.github.io/graphql/October2016/#sec-Insignificant-Commas. + } + return buf.String() +} + +// writeArgumentType writes a minified GraphQL type for t to w. 
+// value indicates whether t is a value (required) type or pointer (optional) type. +// If value is true, then "!" is written at the end of t. +func writeArgumentType(w io.Writer, t reflect.Type, value bool) { + if t.Kind() == reflect.Ptr { + // Pointer is an optional type, so no "!" at the end of the pointer's underlying type. + writeArgumentType(w, t.Elem(), false) + return + } + + switch t.Kind() { + case reflect.Slice, reflect.Array: + // List. E.g., "[Int]". + io.WriteString(w, "[") + writeArgumentType(w, t.Elem(), true) + io.WriteString(w, "]") + default: + // Named type. E.g., "Int". + name := t.Name() + if name == "string" { // HACK: Workaround for https://github.com/shurcooL/githubv4/issues/12. + name = "ID" + } + io.WriteString(w, name) + } + + if value { + // Value is a required type, so add "!" to the end. + io.WriteString(w, "!") + } +} + +// query uses writeQuery to recursively construct +// a minified query string from the provided struct v. +// +// E.g., struct{Foo Int, BarBaz *Boolean} -> "{foo,barBaz}". +func query(v interface{}) string { + var buf bytes.Buffer + writeQuery(&buf, reflect.TypeOf(v), false) + return buf.String() +} + +// writeQuery writes a minified query for t to w. +// If inline is true, the struct fields of t are inlined into parent struct. +func writeQuery(w io.Writer, t reflect.Type, inline bool) { + switch t.Kind() { + case reflect.Ptr, reflect.Slice: + writeQuery(w, t.Elem(), false) + case reflect.Struct: + // If the type implements json.Unmarshaler, it's a scalar. Don't expand it. + if reflect.PtrTo(t).Implements(jsonUnmarshaler) { + return + } + if !inline { + io.WriteString(w, "{") + } + for i := 0; i < t.NumField(); i++ { + if i != 0 { + io.WriteString(w, ",") + } + f := t.Field(i) + value, ok := f.Tag.Lookup("graphql") + inlineField := f.Anonymous && !ok + if !inlineField { + if ok { + io.WriteString(w, value) + } else { + io.WriteString(w, ident.ParseMixedCaps(f.Name).ToLowerCamelCase()) + } + } + writeQuery(w, f.Type, inlineField) + } + if !inline { + io.WriteString(w, "}") + } + } +} + +var jsonUnmarshaler = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem() diff --git a/vendor/github.com/shurcooL/graphql/scalar.go b/vendor/github.com/shurcooL/graphql/scalar.go new file mode 100644 index 0000000000..0f7ceea5e0 --- /dev/null +++ b/vendor/github.com/shurcooL/graphql/scalar.go @@ -0,0 +1,51 @@ +package graphql + +// Note: These custom types are meant to be used in queries for now. +// But the plan is to switch to using native Go types (string, int, bool, time.Time, etc.). +// See https://github.com/shurcooL/githubv4/issues/9 for details. +// +// These custom types currently provide documentation, and their use +// is required for sending outbound queries. However, native Go types +// can be used for unmarshaling. Once https://github.com/shurcooL/githubv4/issues/9 +// is resolved, native Go types can completely replace these. + +type ( + // Boolean represents true or false values. + Boolean bool + + // Float represents signed double-precision fractional values as + // specified by IEEE 754. + Float float64 + + // ID represents a unique identifier that is Base64 obfuscated. It + // is often used to refetch an object or as key for a cache. The ID + // type appears in a JSON response as a String; however, it is not + // intended to be human-readable. When expected as an input type, + // any string (such as "VXNlci0xMA==") or integer (such as 4) input + // value will be accepted as an ID. 
+ ID interface{} + + // Int represents non-fractional signed whole numeric values. + // Int can represent values between -(2^31) and 2^31 - 1. + Int int32 + + // String represents textual data as UTF-8 character sequences. + // This type is most often used by GraphQL to represent free-form + // human-readable text. + String string +) + +// NewBoolean is a helper to make a new *Boolean. +func NewBoolean(v Boolean) *Boolean { return &v } + +// NewFloat is a helper to make a new *Float. +func NewFloat(v Float) *Float { return &v } + +// NewID is a helper to make a new *ID. +func NewID(v ID) *ID { return &v } + +// NewInt is a helper to make a new *Int. +func NewInt(v Int) *Int { return &v } + +// NewString is a helper to make a new *String. +func NewString(v String) *String { return &v } diff --git a/vendor/github.com/ulikunitz/xz/example.go b/vendor/github.com/ulikunitz/xz/example.go deleted file mode 100644 index 855e60aee5..0000000000 --- a/vendor/github.com/ulikunitz/xz/example.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2014-2017 Ulrich Kunitz. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -package main - -import ( - "bytes" - "io" - "log" - "os" - - "github.com/ulikunitz/xz" -) - -func main() { - const text = "The quick brown fox jumps over the lazy dog.\n" - var buf bytes.Buffer - // compress text - w, err := xz.NewWriter(&buf) - if err != nil { - log.Fatalf("xz.NewWriter error %s", err) - } - if _, err := io.WriteString(w, text); err != nil { - log.Fatalf("WriteString error %s", err) - } - if err := w.Close(); err != nil { - log.Fatalf("w.Close error %s", err) - } - // decompress buffer and write output to stdout - r, err := xz.NewReader(&buf) - if err != nil { - log.Fatalf("NewReader error %s", err) - } - if _, err = io.Copy(os.Stdout, r); err != nil { - log.Fatalf("io.Copy error %s", err) - } -} diff --git a/vendor/golang.org/x/sys/unix/mkasm_darwin.go b/vendor/golang.org/x/sys/unix/mkasm_darwin.go deleted file mode 100644 index 4548b993db..0000000000 --- a/vendor/golang.org/x/sys/unix/mkasm_darwin.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -// mkasm_darwin.go generates assembly trampolines to call libSystem routines from Go. -//This program must be run after mksyscall.go. 
-package main - -import ( - "bytes" - "fmt" - "io/ioutil" - "log" - "os" - "strings" -) - -func main() { - in1, err := ioutil.ReadFile("syscall_darwin.go") - if err != nil { - log.Fatalf("can't open syscall_darwin.go: %s", err) - } - arch := os.Args[1] - in2, err := ioutil.ReadFile(fmt.Sprintf("syscall_darwin_%s.go", arch)) - if err != nil { - log.Fatalf("can't open syscall_darwin_%s.go: %s", arch, err) - } - in3, err := ioutil.ReadFile(fmt.Sprintf("zsyscall_darwin_%s.go", arch)) - if err != nil { - log.Fatalf("can't open zsyscall_darwin_%s.go: %s", arch, err) - } - in := string(in1) + string(in2) + string(in3) - - trampolines := map[string]bool{} - - var out bytes.Buffer - - fmt.Fprintf(&out, "// go run mkasm_darwin.go %s\n", strings.Join(os.Args[1:], " ")) - fmt.Fprintf(&out, "// Code generated by the command above; DO NOT EDIT.\n") - fmt.Fprintf(&out, "\n") - fmt.Fprintf(&out, "// +build go1.12\n") - fmt.Fprintf(&out, "\n") - fmt.Fprintf(&out, "#include \"textflag.h\"\n") - for _, line := range strings.Split(in, "\n") { - if !strings.HasPrefix(line, "func ") || !strings.HasSuffix(line, "_trampoline()") { - continue - } - fn := line[5 : len(line)-13] - if !trampolines[fn] { - trampolines[fn] = true - fmt.Fprintf(&out, "TEXT ·%s_trampoline(SB),NOSPLIT,$0-0\n", fn) - fmt.Fprintf(&out, "\tJMP\t%s(SB)\n", fn) - } - } - err = ioutil.WriteFile(fmt.Sprintf("zsyscall_darwin_%s.s", arch), out.Bytes(), 0644) - if err != nil { - log.Fatalf("can't write zsyscall_darwin_%s.s: %s", arch, err) - } -} diff --git a/vendor/golang.org/x/sys/unix/mkpost.go b/vendor/golang.org/x/sys/unix/mkpost.go deleted file mode 100644 index eb4332059a..0000000000 --- a/vendor/golang.org/x/sys/unix/mkpost.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -// mkpost processes the output of cgo -godefs to -// modify the generated types. It is used to clean up -// the sys API in an architecture specific manner. -// -// mkpost is run after cgo -godefs; see README.md. -package main - -import ( - "bytes" - "fmt" - "go/format" - "io/ioutil" - "log" - "os" - "regexp" -) - -func main() { - // Get the OS and architecture (using GOARCH_TARGET if it exists) - goos := os.Getenv("GOOS") - goarch := os.Getenv("GOARCH_TARGET") - if goarch == "" { - goarch = os.Getenv("GOARCH") - } - // Check that we are using the Docker-based build system if we should be. - if goos == "linux" { - if os.Getenv("GOLANG_SYS_BUILD") != "docker" { - os.Stderr.WriteString("In the Docker-based build system, mkpost should not be called directly.\n") - os.Stderr.WriteString("See README.md\n") - os.Exit(1) - } - } - - b, err := ioutil.ReadAll(os.Stdin) - if err != nil { - log.Fatal(err) - } - - if goos == "aix" { - // Replace type of Atim, Mtim and Ctim by Timespec in Stat_t - // to avoid having both StTimespec and Timespec. 
- sttimespec := regexp.MustCompile(`_Ctype_struct_st_timespec`) - b = sttimespec.ReplaceAll(b, []byte("Timespec")) - } - - // Intentionally export __val fields in Fsid and Sigset_t - valRegex := regexp.MustCompile(`type (Fsid|Sigset_t) struct {(\s+)X__(bits|val)(\s+\S+\s+)}`) - b = valRegex.ReplaceAll(b, []byte("type $1 struct {${2}Val$4}")) - - // Intentionally export __fds_bits field in FdSet - fdSetRegex := regexp.MustCompile(`type (FdSet) struct {(\s+)X__fds_bits(\s+\S+\s+)}`) - b = fdSetRegex.ReplaceAll(b, []byte("type $1 struct {${2}Bits$3}")) - - // If we have empty Ptrace structs, we should delete them. Only s390x emits - // nonempty Ptrace structs. - ptraceRexexp := regexp.MustCompile(`type Ptrace((Psw|Fpregs|Per) struct {\s*})`) - b = ptraceRexexp.ReplaceAll(b, nil) - - // Replace the control_regs union with a blank identifier for now. - controlRegsRegex := regexp.MustCompile(`(Control_regs)\s+\[0\]uint64`) - b = controlRegsRegex.ReplaceAll(b, []byte("_ [0]uint64")) - - // Remove fields that are added by glibc - // Note that this is unstable as the identifers are private. - removeFieldsRegex := regexp.MustCompile(`X__glibc\S*`) - b = removeFieldsRegex.ReplaceAll(b, []byte("_")) - - // Convert [65]int8 to [65]byte in Utsname members to simplify - // conversion to string; see golang.org/issue/20753 - convertUtsnameRegex := regexp.MustCompile(`((Sys|Node|Domain)name|Release|Version|Machine)(\s+)\[(\d+)\]u?int8`) - b = convertUtsnameRegex.ReplaceAll(b, []byte("$1$3[$4]byte")) - - // Convert [1024]int8 to [1024]byte in Ptmget members - convertPtmget := regexp.MustCompile(`([SC]n)(\s+)\[(\d+)\]u?int8`) - b = convertPtmget.ReplaceAll(b, []byte("$1[$3]byte")) - - // Remove spare fields (e.g. in Statx_t) - spareFieldsRegex := regexp.MustCompile(`X__spare\S*`) - b = spareFieldsRegex.ReplaceAll(b, []byte("_")) - - // Remove cgo padding fields - removePaddingFieldsRegex := regexp.MustCompile(`Pad_cgo_\d+`) - b = removePaddingFieldsRegex.ReplaceAll(b, []byte("_")) - - // Remove padding, hidden, or unused fields - removeFieldsRegex = regexp.MustCompile(`\b(X_\S+|Padding)`) - b = removeFieldsRegex.ReplaceAll(b, []byte("_")) - - // Remove the first line of warning from cgo - b = b[bytes.IndexByte(b, '\n')+1:] - // Modify the command in the header to include: - // mkpost, our own warning, and a build tag. - replacement := fmt.Sprintf(`$1 | go run mkpost.go -// Code generated by the command above; see README.md. DO NOT EDIT. - -// +build %s,%s`, goarch, goos) - cgoCommandRegex := regexp.MustCompile(`(cgo -godefs .*)`) - b = cgoCommandRegex.ReplaceAll(b, []byte(replacement)) - - // Rename Stat_t time fields - if goos == "freebsd" && goarch == "386" { - // Hide Stat_t.[AMCB]tim_ext fields - renameStatTimeExtFieldsRegex := regexp.MustCompile(`[AMCB]tim_ext`) - b = renameStatTimeExtFieldsRegex.ReplaceAll(b, []byte("_")) - } - renameStatTimeFieldsRegex := regexp.MustCompile(`([AMCB])(?:irth)?time?(?:spec)?\s+(Timespec|StTimespec)`) - b = renameStatTimeFieldsRegex.ReplaceAll(b, []byte("${1}tim ${2}")) - - // gofmt - b, err = format.Source(b) - if err != nil { - log.Fatal(err) - } - - os.Stdout.Write(b) -} diff --git a/vendor/golang.org/x/sys/unix/mksyscall.go b/vendor/golang.org/x/sys/unix/mksyscall.go deleted file mode 100644 index e4af9424e9..0000000000 --- a/vendor/golang.org/x/sys/unix/mksyscall.go +++ /dev/null @@ -1,407 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
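A note on mkpost.go, removed just above, before moving on to mksyscall.go: all of its post-processing is plain regular-expression rewriting of the cgo -godefs output. A small runnable sketch of one of those rewrites, the Utsname []int8-to-[]byte conversion, using the same pattern that appears above (the sample input line is made up for illustration):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern mkpost.go applies to Utsname members; the input is an
	// illustrative sample, not taken from any generated file.
	re := regexp.MustCompile(`((Sys|Node|Domain)name|Release|Version|Machine)(\s+)\[(\d+)\]u?int8`)
	in := []byte("Nodename [65]int8")
	fmt.Println(string(re.ReplaceAll(in, []byte("$1$3[$4]byte")))) // Nodename [65]byte
}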
- -// +build ignore - -/* -This program reads a file containing function prototypes -(like syscall_darwin.go) and generates system call bodies. -The prototypes are marked by lines beginning with "//sys" -and read like func declarations if //sys is replaced by func, but: - * The parameter lists must give a name for each argument. - This includes return parameters. - * The parameter lists must give a type for each argument: - the (x, y, z int) shorthand is not allowed. - * If the return parameter is an error number, it must be named errno. - -A line beginning with //sysnb is like //sys, except that the -goroutine will not be suspended during the execution of the system -call. This must only be used for system calls which can never -block, as otherwise the system call could cause all goroutines to -hang. -*/ -package main - -import ( - "bufio" - "flag" - "fmt" - "os" - "regexp" - "strings" -) - -var ( - b32 = flag.Bool("b32", false, "32bit big-endian") - l32 = flag.Bool("l32", false, "32bit little-endian") - plan9 = flag.Bool("plan9", false, "plan9") - openbsd = flag.Bool("openbsd", false, "openbsd") - netbsd = flag.Bool("netbsd", false, "netbsd") - dragonfly = flag.Bool("dragonfly", false, "dragonfly") - arm = flag.Bool("arm", false, "arm") // 64-bit value should use (even, odd)-pair - tags = flag.String("tags", "", "build tags") - filename = flag.String("output", "", "output file name (standard output if omitted)") -) - -// cmdLine returns this programs's commandline arguments -func cmdLine() string { - return "go run mksyscall.go " + strings.Join(os.Args[1:], " ") -} - -// buildTags returns build tags -func buildTags() string { - return *tags -} - -// Param is function parameter -type Param struct { - Name string - Type string -} - -// usage prints the program usage -func usage() { - fmt.Fprintf(os.Stderr, "usage: go run mksyscall.go [-b32 | -l32] [-tags x,y] [file ...]\n") - os.Exit(1) -} - -// parseParamList parses parameter list and returns a slice of parameters -func parseParamList(list string) []string { - list = strings.TrimSpace(list) - if list == "" { - return []string{} - } - return regexp.MustCompile(`\s*,\s*`).Split(list, -1) -} - -// parseParam splits a parameter into name and type -func parseParam(p string) Param { - ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p) - if ps == nil { - fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p) - os.Exit(1) - } - return Param{ps[1], ps[2]} -} - -func main() { - // Get the OS and architecture (using GOARCH_TARGET if it exists) - goos := os.Getenv("GOOS") - if goos == "" { - fmt.Fprintln(os.Stderr, "GOOS not defined in environment") - os.Exit(1) - } - goarch := os.Getenv("GOARCH_TARGET") - if goarch == "" { - goarch = os.Getenv("GOARCH") - } - - // Check that we are using the Docker-based build system if we should - if goos == "linux" { - if os.Getenv("GOLANG_SYS_BUILD") != "docker" { - fmt.Fprintf(os.Stderr, "In the Docker-based build system, mksyscall should not be called directly.\n") - fmt.Fprintf(os.Stderr, "See README.md\n") - os.Exit(1) - } - } - - flag.Usage = usage - flag.Parse() - if len(flag.Args()) <= 0 { - fmt.Fprintf(os.Stderr, "no files to parse provided\n") - usage() - } - - endianness := "" - if *b32 { - endianness = "big-endian" - } else if *l32 { - endianness = "little-endian" - } - - libc := false - if goos == "darwin" && strings.Contains(buildTags(), ",go1.12") { - libc = true - } - trampolines := map[string]bool{} - - text := "" - for _, path := range flag.Args() { - file, err := os.Open(path) - if 
err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - s := bufio.NewScanner(file) - for s.Scan() { - t := s.Text() - t = strings.TrimSpace(t) - t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `) - nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t) - if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil { - continue - } - - // Line must be of the form - // func Open(path string, mode int, perm int) (fd int, errno error) - // Split into name, in params, out params. - f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*((?i)SYS_[A-Z0-9_]+))?$`).FindStringSubmatch(t) - if f == nil { - fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t) - os.Exit(1) - } - funct, inps, outps, sysname := f[2], f[3], f[4], f[5] - - // ClockGettime doesn't have a syscall number on Darwin, only generate libc wrappers. - if goos == "darwin" && !libc && funct == "ClockGettime" { - continue - } - - // Split argument lists on comma. - in := parseParamList(inps) - out := parseParamList(outps) - - // Try in vain to keep people from editing this file. - // The theory is that they jump into the middle of the file - // without reading the header. - text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n" - - // Go function header. - outDecl := "" - if len(out) > 0 { - outDecl = fmt.Sprintf(" (%s)", strings.Join(out, ", ")) - } - text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outDecl) - - // Check if err return available - errvar := "" - for _, param := range out { - p := parseParam(param) - if p.Type == "error" { - errvar = p.Name - break - } - } - - // Prepare arguments to Syscall. - var args []string - n := 0 - for _, param := range in { - p := parseParam(param) - if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil { - args = append(args, "uintptr(unsafe.Pointer("+p.Name+"))") - } else if p.Type == "string" && errvar != "" { - text += fmt.Sprintf("\tvar _p%d *byte\n", n) - text += fmt.Sprintf("\t_p%d, %s = BytePtrFromString(%s)\n", n, errvar, p.Name) - text += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar) - args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n)) - n++ - } else if p.Type == "string" { - fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n") - text += fmt.Sprintf("\tvar _p%d *byte\n", n) - text += fmt.Sprintf("\t_p%d, _ = BytePtrFromString(%s)\n", n, p.Name) - args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n)) - n++ - } else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil { - // Convert slice into pointer, length. - // Have to be careful not to take address of &a[0] if len == 0: - // pass dummy pointer in that case. - // Used to pass nil, but some OSes or simulators reject write(fd, nil, 0). 
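To make the generation logic above concrete (the lines that follow emit exactly this pointer/length conversion): a prototype such as //sys read(fd int, p []byte) (n int, err error) expands, on Linux for example, into a wrapper of roughly the following shape. This is an illustrative sketch assembled from the format strings in this file, not verbatim generator output; Syscall, errnoErr, _zero and SYS_READ are internals of the generated package.

func read(fd int, p []byte) (n int, err error) {
	// A slice argument becomes a pointer/length pair; a dummy pointer (_zero)
	// is substituted when the slice is empty, as the comment above explains.
	var _p0 unsafe.Pointer
	if len(p) > 0 {
		_p0 = unsafe.Pointer(&p[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(_p0), uintptr(len(p)))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}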
- text += fmt.Sprintf("\tvar _p%d unsafe.Pointer\n", n) - text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = unsafe.Pointer(&%s[0])\n\t}", p.Name, n, p.Name) - text += fmt.Sprintf(" else {\n\t\t_p%d = unsafe.Pointer(&_zero)\n\t}\n", n) - args = append(args, fmt.Sprintf("uintptr(_p%d)", n), fmt.Sprintf("uintptr(len(%s))", p.Name)) - n++ - } else if p.Type == "int64" && (*openbsd || *netbsd) { - args = append(args, "0") - if endianness == "big-endian" { - args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name)) - } else if endianness == "little-endian" { - args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name)) - } else { - args = append(args, fmt.Sprintf("uintptr(%s)", p.Name)) - } - } else if p.Type == "int64" && *dragonfly { - if regexp.MustCompile(`^(?i)extp(read|write)`).FindStringSubmatch(funct) == nil { - args = append(args, "0") - } - if endianness == "big-endian" { - args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name)) - } else if endianness == "little-endian" { - args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name)) - } else { - args = append(args, fmt.Sprintf("uintptr(%s)", p.Name)) - } - } else if (p.Type == "int64" || p.Type == "uint64") && endianness != "" { - if len(args)%2 == 1 && *arm { - // arm abi specifies 64-bit argument uses - // (even, odd) pair - args = append(args, "0") - } - if endianness == "big-endian" { - args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name)) - } else { - args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name)) - } - } else { - args = append(args, fmt.Sprintf("uintptr(%s)", p.Name)) - } - } - - // Determine which form to use; pad args with zeros. - asm := "Syscall" - if nonblock != nil { - if errvar == "" && goos == "linux" { - asm = "RawSyscallNoError" - } else { - asm = "RawSyscall" - } - } else { - if errvar == "" && goos == "linux" { - asm = "SyscallNoError" - } - } - if len(args) <= 3 { - for len(args) < 3 { - args = append(args, "0") - } - } else if len(args) <= 6 { - asm += "6" - for len(args) < 6 { - args = append(args, "0") - } - } else if len(args) <= 9 { - asm += "9" - for len(args) < 9 { - args = append(args, "0") - } - } else { - fmt.Fprintf(os.Stderr, "%s:%s too many arguments to system call\n", path, funct) - } - - // System call number. - if sysname == "" { - sysname = "SYS_" + funct - sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`) - sysname = strings.ToUpper(sysname) - } - - var libcFn string - if libc { - asm = "syscall_" + strings.ToLower(asm[:1]) + asm[1:] // internal syscall call - sysname = strings.TrimPrefix(sysname, "SYS_") // remove SYS_ - sysname = strings.ToLower(sysname) // lowercase - if sysname == "getdirentries64" { - // Special case - libSystem name and - // raw syscall name don't match. - sysname = "__getdirentries64" - } - libcFn = sysname - sysname = "funcPC(libc_" + sysname + "_trampoline)" - } - - // Actual call. - arglist := strings.Join(args, ", ") - call := fmt.Sprintf("%s(%s, %s)", asm, sysname, arglist) - - // Assign return values. 
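A quick aside before the return-value handling below: the SYS_ constant naming step a few lines up is easy to sanity-check in isolation. A tiny runnable sketch using the same regular expression (ClockGettime is just a sample input):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	funct := "ClockGettime" // sample function name
	sysname := "SYS_" + funct
	// Same lower-to-upper split the generator applies before upper-casing.
	sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`)
	fmt.Println(strings.ToUpper(sysname)) // SYS_CLOCK_GETTIME
}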
- body := "" - ret := []string{"_", "_", "_"} - doErrno := false - for i := 0; i < len(out); i++ { - p := parseParam(out[i]) - reg := "" - if p.Name == "err" && !*plan9 { - reg = "e1" - ret[2] = reg - doErrno = true - } else if p.Name == "err" && *plan9 { - ret[0] = "r0" - ret[2] = "e1" - break - } else { - reg = fmt.Sprintf("r%d", i) - ret[i] = reg - } - if p.Type == "bool" { - reg = fmt.Sprintf("%s != 0", reg) - } - if p.Type == "int64" && endianness != "" { - // 64-bit number in r1:r0 or r0:r1. - if i+2 > len(out) { - fmt.Fprintf(os.Stderr, "%s:%s not enough registers for int64 return\n", path, funct) - } - if endianness == "big-endian" { - reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i, i+1) - } else { - reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i+1, i) - } - ret[i] = fmt.Sprintf("r%d", i) - ret[i+1] = fmt.Sprintf("r%d", i+1) - } - if reg != "e1" || *plan9 { - body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg) - } - } - if ret[0] == "_" && ret[1] == "_" && ret[2] == "_" { - text += fmt.Sprintf("\t%s\n", call) - } else { - if errvar == "" && goos == "linux" { - // raw syscall without error on Linux, see golang.org/issue/22924 - text += fmt.Sprintf("\t%s, %s := %s\n", ret[0], ret[1], call) - } else { - text += fmt.Sprintf("\t%s, %s, %s := %s\n", ret[0], ret[1], ret[2], call) - } - } - text += body - - if *plan9 && ret[2] == "e1" { - text += "\tif int32(r0) == -1 {\n" - text += "\t\terr = e1\n" - text += "\t}\n" - } else if doErrno { - text += "\tif e1 != 0 {\n" - text += "\t\terr = errnoErr(e1)\n" - text += "\t}\n" - } - text += "\treturn\n" - text += "}\n\n" - - if libc && !trampolines[libcFn] { - // some system calls share a trampoline, like read and readlen. - trampolines[libcFn] = true - // Declare assembly trampoline. - text += fmt.Sprintf("func libc_%s_trampoline()\n", libcFn) - // Assembly trampoline calls the libc_* function, which this magic - // redirects to use the function from libSystem. - text += fmt.Sprintf("//go:linkname libc_%s libc_%s\n", libcFn, libcFn) - text += fmt.Sprintf("//go:cgo_import_dynamic libc_%s %s \"/usr/lib/libSystem.B.dylib\"\n", libcFn, libcFn) - text += "\n" - } - } - if err := s.Err(); err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - file.Close() - } - fmt.Printf(srcTemplate, cmdLine(), buildTags(), text) -} - -const srcTemplate = `// %s -// Code generated by the command above; see README.md. DO NOT EDIT. - -// +build %s - -package unix - -import ( - "syscall" - "unsafe" -) - -var _ syscall.Errno - -%s -` diff --git a/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc.go b/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc.go deleted file mode 100644 index 3be3cdfc3b..0000000000 --- a/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc.go +++ /dev/null @@ -1,415 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -/* -This program reads a file containing function prototypes -(like syscall_aix.go) and generates system call bodies. -The prototypes are marked by lines beginning with "//sys" -and read like func declarations if //sys is replaced by func, but: - * The parameter lists must give a name for each argument. - This includes return parameters. - * The parameter lists must give a type for each argument: - the (x, y, z int) shorthand is not allowed. - * If the return parameter is an error number, it must be named err. 
- * If go func name needs to be different than its libc name, - * or the function is not in libc, name could be specified - * at the end, after "=" sign, like - //sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt -*/ -package main - -import ( - "bufio" - "flag" - "fmt" - "os" - "regexp" - "strings" -) - -var ( - b32 = flag.Bool("b32", false, "32bit big-endian") - l32 = flag.Bool("l32", false, "32bit little-endian") - aix = flag.Bool("aix", false, "aix") - tags = flag.String("tags", "", "build tags") -) - -// cmdLine returns this programs's commandline arguments -func cmdLine() string { - return "go run mksyscall_aix_ppc.go " + strings.Join(os.Args[1:], " ") -} - -// buildTags returns build tags -func buildTags() string { - return *tags -} - -// Param is function parameter -type Param struct { - Name string - Type string -} - -// usage prints the program usage -func usage() { - fmt.Fprintf(os.Stderr, "usage: go run mksyscall_aix_ppc.go [-b32 | -l32] [-tags x,y] [file ...]\n") - os.Exit(1) -} - -// parseParamList parses parameter list and returns a slice of parameters -func parseParamList(list string) []string { - list = strings.TrimSpace(list) - if list == "" { - return []string{} - } - return regexp.MustCompile(`\s*,\s*`).Split(list, -1) -} - -// parseParam splits a parameter into name and type -func parseParam(p string) Param { - ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p) - if ps == nil { - fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p) - os.Exit(1) - } - return Param{ps[1], ps[2]} -} - -func main() { - flag.Usage = usage - flag.Parse() - if len(flag.Args()) <= 0 { - fmt.Fprintf(os.Stderr, "no files to parse provided\n") - usage() - } - - endianness := "" - if *b32 { - endianness = "big-endian" - } else if *l32 { - endianness = "little-endian" - } - - pack := "" - text := "" - cExtern := "/*\n#include \n#include \n" - for _, path := range flag.Args() { - file, err := os.Open(path) - if err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - s := bufio.NewScanner(file) - for s.Scan() { - t := s.Text() - t = strings.TrimSpace(t) - t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `) - if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" { - pack = p[1] - } - nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t) - if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil { - continue - } - - // Line must be of the form - // func Open(path string, mode int, perm int) (fd int, err error) - // Split into name, in params, out params. - f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t) - if f == nil { - fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t) - os.Exit(1) - } - funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6] - - // Split argument lists on comma. - in := parseParamList(inps) - out := parseParamList(outps) - - inps = strings.Join(in, ", ") - outps = strings.Join(out, ", ") - - // Try in vain to keep people from editing this file. - // The theory is that they jump into the middle of the file - // without reading the header. 
- text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n" - - // Check if value return, err return available - errvar := "" - retvar := "" - rettype := "" - for _, param := range out { - p := parseParam(param) - if p.Type == "error" { - errvar = p.Name - } else { - retvar = p.Name - rettype = p.Type - } - } - - // System call name. - if sysname == "" { - sysname = funct - } - sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`) - sysname = strings.ToLower(sysname) // All libc functions are lowercase. - - cRettype := "" - if rettype == "unsafe.Pointer" { - cRettype = "uintptr_t" - } else if rettype == "uintptr" { - cRettype = "uintptr_t" - } else if regexp.MustCompile(`^_`).FindStringSubmatch(rettype) != nil { - cRettype = "uintptr_t" - } else if rettype == "int" { - cRettype = "int" - } else if rettype == "int32" { - cRettype = "int" - } else if rettype == "int64" { - cRettype = "long long" - } else if rettype == "uint32" { - cRettype = "unsigned int" - } else if rettype == "uint64" { - cRettype = "unsigned long long" - } else { - cRettype = "int" - } - if sysname == "exit" { - cRettype = "void" - } - - // Change p.Types to c - var cIn []string - for _, param := range in { - p := parseParam(param) - if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil { - cIn = append(cIn, "uintptr_t") - } else if p.Type == "string" { - cIn = append(cIn, "uintptr_t") - } else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil { - cIn = append(cIn, "uintptr_t", "size_t") - } else if p.Type == "unsafe.Pointer" { - cIn = append(cIn, "uintptr_t") - } else if p.Type == "uintptr" { - cIn = append(cIn, "uintptr_t") - } else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil { - cIn = append(cIn, "uintptr_t") - } else if p.Type == "int" { - cIn = append(cIn, "int") - } else if p.Type == "int32" { - cIn = append(cIn, "int") - } else if p.Type == "int64" { - cIn = append(cIn, "long long") - } else if p.Type == "uint32" { - cIn = append(cIn, "unsigned int") - } else if p.Type == "uint64" { - cIn = append(cIn, "unsigned long long") - } else { - cIn = append(cIn, "int") - } - } - - if funct != "fcntl" && funct != "FcntlInt" && funct != "readlen" && funct != "writelen" { - if sysname == "select" { - // select is a keyword of Go. Its name is - // changed to c_select. - cExtern += "#define c_select select\n" - } - // Imports of system calls from libc - cExtern += fmt.Sprintf("%s %s", cRettype, sysname) - cIn := strings.Join(cIn, ", ") - cExtern += fmt.Sprintf("(%s);\n", cIn) - } - - // So file name. - if *aix { - if modname == "" { - modname = "libc.a/shr_64.o" - } else { - fmt.Fprintf(os.Stderr, "%s: only syscall using libc are available\n", funct) - os.Exit(1) - } - } - - strconvfunc := "C.CString" - - // Go function header. - if outps != "" { - outps = fmt.Sprintf(" (%s)", outps) - } - if text != "" { - text += "\n" - } - - text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outps) - - // Prepare arguments to Syscall. 
- var args []string - n := 0 - argN := 0 - for _, param := range in { - p := parseParam(param) - if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil { - args = append(args, "C.uintptr_t(uintptr(unsafe.Pointer("+p.Name+")))") - } else if p.Type == "string" && errvar != "" { - text += fmt.Sprintf("\t_p%d := uintptr(unsafe.Pointer(%s(%s)))\n", n, strconvfunc, p.Name) - args = append(args, fmt.Sprintf("C.uintptr_t(_p%d)", n)) - n++ - } else if p.Type == "string" { - fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n") - text += fmt.Sprintf("\t_p%d := uintptr(unsafe.Pointer(%s(%s)))\n", n, strconvfunc, p.Name) - args = append(args, fmt.Sprintf("C.uintptr_t(_p%d)", n)) - n++ - } else if m := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); m != nil { - // Convert slice into pointer, length. - // Have to be careful not to take address of &a[0] if len == 0: - // pass nil in that case. - text += fmt.Sprintf("\tvar _p%d *%s\n", n, m[1]) - text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name) - args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(unsafe.Pointer(_p%d)))", n)) - n++ - text += fmt.Sprintf("\tvar _p%d int\n", n) - text += fmt.Sprintf("\t_p%d = len(%s)\n", n, p.Name) - args = append(args, fmt.Sprintf("C.size_t(_p%d)", n)) - n++ - } else if p.Type == "int64" && endianness != "" { - if endianness == "big-endian" { - args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name)) - } else { - args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name)) - } - n++ - } else if p.Type == "bool" { - text += fmt.Sprintf("\tvar _p%d uint32\n", n) - text += fmt.Sprintf("\tif %s {\n\t\t_p%d = 1\n\t} else {\n\t\t_p%d = 0\n\t}\n", p.Name, n, n) - args = append(args, fmt.Sprintf("_p%d", n)) - } else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil { - args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(%s))", p.Name)) - } else if p.Type == "unsafe.Pointer" { - args = append(args, fmt.Sprintf("C.uintptr_t(uintptr(%s))", p.Name)) - } else if p.Type == "int" { - if (argN == 2) && ((funct == "readlen") || (funct == "writelen")) { - args = append(args, fmt.Sprintf("C.size_t(%s)", p.Name)) - } else if argN == 0 && funct == "fcntl" { - args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name)) - } else if (argN == 2) && ((funct == "fcntl") || (funct == "FcntlInt")) { - args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name)) - } else { - args = append(args, fmt.Sprintf("C.int(%s)", p.Name)) - } - } else if p.Type == "int32" { - args = append(args, fmt.Sprintf("C.int(%s)", p.Name)) - } else if p.Type == "int64" { - args = append(args, fmt.Sprintf("C.longlong(%s)", p.Name)) - } else if p.Type == "uint32" { - args = append(args, fmt.Sprintf("C.uint(%s)", p.Name)) - } else if p.Type == "uint64" { - args = append(args, fmt.Sprintf("C.ulonglong(%s)", p.Name)) - } else if p.Type == "uintptr" { - args = append(args, fmt.Sprintf("C.uintptr_t(%s)", p.Name)) - } else { - args = append(args, fmt.Sprintf("C.int(%s)", p.Name)) - } - argN++ - } - - // Actual call. - arglist := strings.Join(args, ", ") - call := "" - if sysname == "exit" { - if errvar != "" { - call += "er :=" - } else { - call += "" - } - } else if errvar != "" { - call += "r0,er :=" - } else if retvar != "" { - call += "r0,_ :=" - } else { - call += "" - } - if sysname == "select" { - // select is a keyword of Go. Its name is - // changed to c_select. 
- call += fmt.Sprintf("C.c_%s(%s)", sysname, arglist) - } else { - call += fmt.Sprintf("C.%s(%s)", sysname, arglist) - } - - // Assign return values. - body := "" - for i := 0; i < len(out); i++ { - p := parseParam(out[i]) - reg := "" - if p.Name == "err" { - reg = "e1" - } else { - reg = "r0" - } - if reg != "e1" { - body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg) - } - } - - // verify return - if sysname != "exit" && errvar != "" { - if regexp.MustCompile(`^uintptr`).FindStringSubmatch(cRettype) != nil { - body += "\tif (uintptr(r0) ==^uintptr(0) && er != nil) {\n" - body += fmt.Sprintf("\t\t%s = er\n", errvar) - body += "\t}\n" - } else { - body += "\tif (r0 ==-1 && er != nil) {\n" - body += fmt.Sprintf("\t\t%s = er\n", errvar) - body += "\t}\n" - } - } else if errvar != "" { - body += "\tif (er != nil) {\n" - body += fmt.Sprintf("\t\t%s = er\n", errvar) - body += "\t}\n" - } - - text += fmt.Sprintf("\t%s\n", call) - text += body - - text += "\treturn\n" - text += "}\n" - } - if err := s.Err(); err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - file.Close() - } - imp := "" - if pack != "unix" { - imp = "import \"golang.org/x/sys/unix\"\n" - - } - fmt.Printf(srcTemplate, cmdLine(), buildTags(), pack, cExtern, imp, text) -} - -const srcTemplate = `// %s -// Code generated by the command above; see README.md. DO NOT EDIT. - -// +build %s - -package %s - - -%s -*/ -import "C" -import ( - "unsafe" -) - - -%s - -%s -` diff --git a/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc64.go b/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc64.go deleted file mode 100644 index c960099517..0000000000 --- a/vendor/golang.org/x/sys/unix/mksyscall_aix_ppc64.go +++ /dev/null @@ -1,614 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -/* -This program reads a file containing function prototypes -(like syscall_aix.go) and generates system call bodies. -The prototypes are marked by lines beginning with "//sys" -and read like func declarations if //sys is replaced by func, but: - * The parameter lists must give a name for each argument. - This includes return parameters. - * The parameter lists must give a type for each argument: - the (x, y, z int) shorthand is not allowed. - * If the return parameter is an error number, it must be named err. - * If go func name needs to be different than its libc name, - * or the function is not in libc, name could be specified - * at the end, after "=" sign, like - //sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt - - -This program will generate three files and handle both gc and gccgo implementation: - - zsyscall_aix_ppc64.go: the common part of each implementation (error handler, pointer creation) - - zsyscall_aix_ppc64_gc.go: gc part with //go_cgo_import_dynamic and a call to syscall6 - - zsyscall_aix_ppc64_gccgo.go: gccgo part with C function and conversion to C type. - - The generated code looks like this - -zsyscall_aix_ppc64.go -func asyscall(...) (n int, err error) { - // Pointer Creation - r1, e1 := callasyscall(...) - // Type Conversion - // Error Handler - return -} - -zsyscall_aix_ppc64_gc.go -//go:cgo_import_dynamic libc_asyscall asyscall "libc.a/shr_64.o" -//go:linkname libc_asyscall libc_asyscall -var asyscall syscallFunc - -func callasyscall(...) 
(r1 uintptr, e1 Errno) { - r1, _, e1 = syscall6(uintptr(unsafe.Pointer(&libc_asyscall)), "nb_args", ... ) - return -} - -zsyscall_aix_ppc64_ggcgo.go - -// int asyscall(...) - -import "C" - -func callasyscall(...) (r1 uintptr, e1 Errno) { - r1 = uintptr(C.asyscall(...)) - e1 = syscall.GetErrno() - return -} -*/ - -package main - -import ( - "bufio" - "flag" - "fmt" - "io/ioutil" - "os" - "regexp" - "strings" -) - -var ( - b32 = flag.Bool("b32", false, "32bit big-endian") - l32 = flag.Bool("l32", false, "32bit little-endian") - aix = flag.Bool("aix", false, "aix") - tags = flag.String("tags", "", "build tags") -) - -// cmdLine returns this programs's commandline arguments -func cmdLine() string { - return "go run mksyscall_aix_ppc64.go " + strings.Join(os.Args[1:], " ") -} - -// buildTags returns build tags -func buildTags() string { - return *tags -} - -// Param is function parameter -type Param struct { - Name string - Type string -} - -// usage prints the program usage -func usage() { - fmt.Fprintf(os.Stderr, "usage: go run mksyscall_aix_ppc64.go [-b32 | -l32] [-tags x,y] [file ...]\n") - os.Exit(1) -} - -// parseParamList parses parameter list and returns a slice of parameters -func parseParamList(list string) []string { - list = strings.TrimSpace(list) - if list == "" { - return []string{} - } - return regexp.MustCompile(`\s*,\s*`).Split(list, -1) -} - -// parseParam splits a parameter into name and type -func parseParam(p string) Param { - ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p) - if ps == nil { - fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p) - os.Exit(1) - } - return Param{ps[1], ps[2]} -} - -func main() { - flag.Usage = usage - flag.Parse() - if len(flag.Args()) <= 0 { - fmt.Fprintf(os.Stderr, "no files to parse provided\n") - usage() - } - - endianness := "" - if *b32 { - endianness = "big-endian" - } else if *l32 { - endianness = "little-endian" - } - - pack := "" - // GCCGO - textgccgo := "" - cExtern := "/*\n#include \n" - // GC - textgc := "" - dynimports := "" - linknames := "" - var vars []string - // COMMON - textcommon := "" - for _, path := range flag.Args() { - file, err := os.Open(path) - if err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - s := bufio.NewScanner(file) - for s.Scan() { - t := s.Text() - t = strings.TrimSpace(t) - t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `) - if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" { - pack = p[1] - } - nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t) - if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil { - continue - } - - // Line must be of the form - // func Open(path string, mode int, perm int) (fd int, err error) - // Split into name, in params, out params. - f := regexp.MustCompile(`^\/\/sys(nb)? (\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t) - if f == nil { - fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t) - os.Exit(1) - } - funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6] - - // Split argument lists on comma. - in := parseParamList(inps) - out := parseParamList(outps) - - inps = strings.Join(in, ", ") - outps = strings.Join(out, ", ") - - if sysname == "" { - sysname = funct - } - - onlyCommon := false - if funct == "readlen" || funct == "writelen" || funct == "FcntlInt" || funct == "FcntlFlock" { - // This function call another syscall which is already implemented. 
- // Therefore, the gc and gccgo part must not be generated. - onlyCommon = true - } - - // Try in vain to keep people from editing this file. - // The theory is that they jump into the middle of the file - // without reading the header. - - textcommon += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n" - if !onlyCommon { - textgccgo += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n" - textgc += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n" - } - - // Check if value return, err return available - errvar := "" - rettype := "" - for _, param := range out { - p := parseParam(param) - if p.Type == "error" { - errvar = p.Name - } else { - rettype = p.Type - } - } - - sysname = regexp.MustCompile(`([a-z])([A-Z])`).ReplaceAllString(sysname, `${1}_$2`) - sysname = strings.ToLower(sysname) // All libc functions are lowercase. - - // GCCGO Prototype return type - cRettype := "" - if rettype == "unsafe.Pointer" { - cRettype = "uintptr_t" - } else if rettype == "uintptr" { - cRettype = "uintptr_t" - } else if regexp.MustCompile(`^_`).FindStringSubmatch(rettype) != nil { - cRettype = "uintptr_t" - } else if rettype == "int" { - cRettype = "int" - } else if rettype == "int32" { - cRettype = "int" - } else if rettype == "int64" { - cRettype = "long long" - } else if rettype == "uint32" { - cRettype = "unsigned int" - } else if rettype == "uint64" { - cRettype = "unsigned long long" - } else { - cRettype = "int" - } - if sysname == "exit" { - cRettype = "void" - } - - // GCCGO Prototype arguments type - var cIn []string - for i, param := range in { - p := parseParam(param) - if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil { - cIn = append(cIn, "uintptr_t") - } else if p.Type == "string" { - cIn = append(cIn, "uintptr_t") - } else if regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type) != nil { - cIn = append(cIn, "uintptr_t", "size_t") - } else if p.Type == "unsafe.Pointer" { - cIn = append(cIn, "uintptr_t") - } else if p.Type == "uintptr" { - cIn = append(cIn, "uintptr_t") - } else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil { - cIn = append(cIn, "uintptr_t") - } else if p.Type == "int" { - if (i == 0 || i == 2) && funct == "fcntl" { - // These fcntl arguments needs to be uintptr to be able to call FcntlInt and FcntlFlock - cIn = append(cIn, "uintptr_t") - } else { - cIn = append(cIn, "int") - } - - } else if p.Type == "int32" { - cIn = append(cIn, "int") - } else if p.Type == "int64" { - cIn = append(cIn, "long long") - } else if p.Type == "uint32" { - cIn = append(cIn, "unsigned int") - } else if p.Type == "uint64" { - cIn = append(cIn, "unsigned long long") - } else { - cIn = append(cIn, "int") - } - } - - if !onlyCommon { - // GCCGO Prototype Generation - // Imports of system calls from libc - if sysname == "select" { - // select is a keyword of Go. Its name is - // changed to c_select. - cExtern += "#define c_select select\n" - } - cExtern += fmt.Sprintf("%s %s", cRettype, sysname) - cIn := strings.Join(cIn, ", ") - cExtern += fmt.Sprintf("(%s);\n", cIn) - } - // GC Library name - if modname == "" { - modname = "libc.a/shr_64.o" - } else { - fmt.Fprintf(os.Stderr, "%s: only syscall using libc are available\n", funct) - os.Exit(1) - } - sysvarname := fmt.Sprintf("libc_%s", sysname) - - if !onlyCommon { - // GC Runtime import of function to allow cross-platform builds. 
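The directives emitted just below are easier to read with a concrete instance in mind. An illustrative sketch of the gc-mode glue this produces for one zero-argument libc function (getpid is an example name, not taken from this diff; syscallFunc, Errno and syscall6 are internals of the generated package, and imports are omitted):

//go:cgo_import_dynamic libc_getpid getpid "libc.a/shr_64.o"
//go:linkname libc_getpid libc_getpid

var libc_getpid syscallFunc

func callgetpid() (r1 uintptr, e1 Errno) {
	// nargs is 0 here; the remaining six argument slots are zero-padded.
	r1, _, e1 = syscall6(uintptr(unsafe.Pointer(&libc_getpid)), 0, 0, 0, 0, 0, 0, 0)
	return
}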
- dynimports += fmt.Sprintf("//go:cgo_import_dynamic %s %s \"%s\"\n", sysvarname, sysname, modname) - // GC Link symbol to proc address variable. - linknames += fmt.Sprintf("//go:linkname %s %s\n", sysvarname, sysvarname) - // GC Library proc address variable. - vars = append(vars, sysvarname) - } - - strconvfunc := "BytePtrFromString" - strconvtype := "*byte" - - // Go function header. - if outps != "" { - outps = fmt.Sprintf(" (%s)", outps) - } - if textcommon != "" { - textcommon += "\n" - } - - textcommon += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outps) - - // Prepare arguments tocall. - var argscommon []string // Arguments in the common part - var argscall []string // Arguments for call prototype - var argsgc []string // Arguments for gc call (with syscall6) - var argsgccgo []string // Arguments for gccgo call (with C.name_of_syscall) - n := 0 - argN := 0 - for _, param := range in { - p := parseParam(param) - if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil { - argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(%s))", p.Name)) - argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name)) - argsgc = append(argsgc, p.Name) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name)) - } else if p.Type == "string" && errvar != "" { - textcommon += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype) - textcommon += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name) - textcommon += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar) - - argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n)) - argscall = append(argscall, fmt.Sprintf("_p%d uintptr ", n)) - argsgc = append(argsgc, fmt.Sprintf("_p%d", n)) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n)) - n++ - } else if p.Type == "string" { - fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n") - textcommon += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype) - textcommon += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name) - textcommon += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar) - - argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n)) - argscall = append(argscall, fmt.Sprintf("_p%d uintptr", n)) - argsgc = append(argsgc, fmt.Sprintf("_p%d", n)) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n)) - n++ - } else if m := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); m != nil { - // Convert slice into pointer, length. - // Have to be careful not to take address of &a[0] if len == 0: - // pass nil in that case. - textcommon += fmt.Sprintf("\tvar _p%d *%s\n", n, m[1]) - textcommon += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name) - argscommon = append(argscommon, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n), fmt.Sprintf("len(%s)", p.Name)) - argscall = append(argscall, fmt.Sprintf("_p%d uintptr", n), fmt.Sprintf("_lenp%d int", n)) - argsgc = append(argsgc, fmt.Sprintf("_p%d", n), fmt.Sprintf("uintptr(_lenp%d)", n)) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(_p%d)", n), fmt.Sprintf("C.size_t(_lenp%d)", n)) - n++ - } else if p.Type == "int64" && endianness != "" { - fmt.Fprintf(os.Stderr, path+":"+funct+" uses int64 with 32 bits mode. Case not yet implemented\n") - } else if p.Type == "bool" { - fmt.Fprintf(os.Stderr, path+":"+funct+" uses bool. 
Case not yet implemented\n") - } else if regexp.MustCompile(`^_`).FindStringSubmatch(p.Type) != nil || p.Type == "unsafe.Pointer" { - argscommon = append(argscommon, fmt.Sprintf("uintptr(%s)", p.Name)) - argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name)) - argsgc = append(argsgc, p.Name) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name)) - } else if p.Type == "int" { - if (argN == 0 || argN == 2) && ((funct == "fcntl") || (funct == "FcntlInt") || (funct == "FcntlFlock")) { - // These fcntl arguments need to be uintptr to be able to call FcntlInt and FcntlFlock - argscommon = append(argscommon, fmt.Sprintf("uintptr(%s)", p.Name)) - argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name)) - argsgc = append(argsgc, p.Name) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name)) - - } else { - argscommon = append(argscommon, p.Name) - argscall = append(argscall, fmt.Sprintf("%s int", p.Name)) - argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name)) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name)) - } - } else if p.Type == "int32" { - argscommon = append(argscommon, p.Name) - argscall = append(argscall, fmt.Sprintf("%s int32", p.Name)) - argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name)) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name)) - } else if p.Type == "int64" { - argscommon = append(argscommon, p.Name) - argscall = append(argscall, fmt.Sprintf("%s int64", p.Name)) - argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name)) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.longlong(%s)", p.Name)) - } else if p.Type == "uint32" { - argscommon = append(argscommon, p.Name) - argscall = append(argscall, fmt.Sprintf("%s uint32", p.Name)) - argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name)) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.uint(%s)", p.Name)) - } else if p.Type == "uint64" { - argscommon = append(argscommon, p.Name) - argscall = append(argscall, fmt.Sprintf("%s uint64", p.Name)) - argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name)) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.ulonglong(%s)", p.Name)) - } else if p.Type == "uintptr" { - argscommon = append(argscommon, p.Name) - argscall = append(argscall, fmt.Sprintf("%s uintptr", p.Name)) - argsgc = append(argsgc, p.Name) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.uintptr_t(%s)", p.Name)) - } else { - argscommon = append(argscommon, fmt.Sprintf("int(%s)", p.Name)) - argscall = append(argscall, fmt.Sprintf("%s int", p.Name)) - argsgc = append(argsgc, fmt.Sprintf("uintptr(%s)", p.Name)) - argsgccgo = append(argsgccgo, fmt.Sprintf("C.int(%s)", p.Name)) - } - argN++ - } - nargs := len(argsgc) - - // COMMON function generation - argscommonlist := strings.Join(argscommon, ", ") - callcommon := fmt.Sprintf("call%s(%s)", sysname, argscommonlist) - ret := []string{"_", "_"} - body := "" - doErrno := false - for i := 0; i < len(out); i++ { - p := parseParam(out[i]) - reg := "" - if p.Name == "err" { - reg = "e1" - ret[1] = reg - doErrno = true - } else { - reg = "r0" - ret[0] = reg - } - if p.Type == "bool" { - reg = fmt.Sprintf("%s != 0", reg) - } - if reg != "e1" { - body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg) - } - } - if ret[0] == "_" && ret[1] == "_" { - textcommon += fmt.Sprintf("\t%s\n", callcommon) - } else { - textcommon += fmt.Sprintf("\t%s, %s := %s\n", ret[0], ret[1], callcommon) - } - textcommon += body - - if doErrno { - textcommon += "\tif e1 != 0 {\n" - textcommon += "\t\terr = 
errnoErr(e1)\n" - textcommon += "\t}\n" - } - textcommon += "\treturn\n" - textcommon += "}\n" - - if onlyCommon { - continue - } - - // CALL Prototype - callProto := fmt.Sprintf("func call%s(%s) (r1 uintptr, e1 Errno) {\n", sysname, strings.Join(argscall, ", ")) - - // GC function generation - asm := "syscall6" - if nonblock != nil { - asm = "rawSyscall6" - } - - if len(argsgc) <= 6 { - for len(argsgc) < 6 { - argsgc = append(argsgc, "0") - } - } else { - fmt.Fprintf(os.Stderr, "%s: too many arguments to system call", funct) - os.Exit(1) - } - argsgclist := strings.Join(argsgc, ", ") - callgc := fmt.Sprintf("%s(uintptr(unsafe.Pointer(&%s)), %d, %s)", asm, sysvarname, nargs, argsgclist) - - textgc += callProto - textgc += fmt.Sprintf("\tr1, _, e1 = %s\n", callgc) - textgc += "\treturn\n}\n" - - // GCCGO function generation - argsgccgolist := strings.Join(argsgccgo, ", ") - var callgccgo string - if sysname == "select" { - // select is a keyword of Go. Its name is - // changed to c_select. - callgccgo = fmt.Sprintf("C.c_%s(%s)", sysname, argsgccgolist) - } else { - callgccgo = fmt.Sprintf("C.%s(%s)", sysname, argsgccgolist) - } - textgccgo += callProto - textgccgo += fmt.Sprintf("\tr1 = uintptr(%s)\n", callgccgo) - textgccgo += "\te1 = syscall.GetErrno()\n" - textgccgo += "\treturn\n}\n" - } - if err := s.Err(); err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - file.Close() - } - imp := "" - if pack != "unix" { - imp = "import \"golang.org/x/sys/unix\"\n" - - } - - // Print zsyscall_aix_ppc64.go - err := ioutil.WriteFile("zsyscall_aix_ppc64.go", - []byte(fmt.Sprintf(srcTemplate1, cmdLine(), buildTags(), pack, imp, textcommon)), - 0644) - if err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - - // Print zsyscall_aix_ppc64_gc.go - vardecls := "\t" + strings.Join(vars, ",\n\t") - vardecls += " syscallFunc" - err = ioutil.WriteFile("zsyscall_aix_ppc64_gc.go", - []byte(fmt.Sprintf(srcTemplate2, cmdLine(), buildTags(), pack, imp, dynimports, linknames, vardecls, textgc)), - 0644) - if err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - - // Print zsyscall_aix_ppc64_gccgo.go - err = ioutil.WriteFile("zsyscall_aix_ppc64_gccgo.go", - []byte(fmt.Sprintf(srcTemplate3, cmdLine(), buildTags(), pack, cExtern, imp, textgccgo)), - 0644) - if err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } -} - -const srcTemplate1 = `// %s -// Code generated by the command above; see README.md. DO NOT EDIT. - -// +build %s - -package %s - -import ( - "unsafe" -) - - -%s - -%s -` -const srcTemplate2 = `// %s -// Code generated by the command above; see README.md. DO NOT EDIT. - -// +build %s -// +build !gccgo - -package %s - -import ( - "unsafe" -) -%s -%s -%s -type syscallFunc uintptr - -var ( -%s -) - -// Implemented in runtime/syscall_aix.go. -func rawSyscall6(trap, nargs, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno) -func syscall6(trap, nargs, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno) - -%s -` -const srcTemplate3 = `// %s -// Code generated by the command above; see README.md. DO NOT EDIT. - -// +build %s -// +build gccgo - -package %s - -%s -*/ -import "C" -import ( - "syscall" -) - - -%s - -%s -` diff --git a/vendor/golang.org/x/sys/unix/mksyscall_solaris.go b/vendor/golang.org/x/sys/unix/mksyscall_solaris.go deleted file mode 100644 index 3d864738b6..0000000000 --- a/vendor/golang.org/x/sys/unix/mksyscall_solaris.go +++ /dev/null @@ -1,335 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -/* - This program reads a file containing function prototypes - (like syscall_solaris.go) and generates system call bodies. - The prototypes are marked by lines beginning with "//sys" - and read like func declarations if //sys is replaced by func, but: - * The parameter lists must give a name for each argument. - This includes return parameters. - * The parameter lists must give a type for each argument: - the (x, y, z int) shorthand is not allowed. - * If the return parameter is an error number, it must be named err. - * If go func name needs to be different than its libc name, - * or the function is not in libc, name could be specified - * at the end, after "=" sign, like - //sys getsockopt(s int, level int, name int, val uintptr, vallen *_Socklen) (err error) = libsocket.getsockopt -*/ - -package main - -import ( - "bufio" - "flag" - "fmt" - "os" - "regexp" - "strings" -) - -var ( - b32 = flag.Bool("b32", false, "32bit big-endian") - l32 = flag.Bool("l32", false, "32bit little-endian") - tags = flag.String("tags", "", "build tags") -) - -// cmdLine returns this programs's commandline arguments -func cmdLine() string { - return "go run mksyscall_solaris.go " + strings.Join(os.Args[1:], " ") -} - -// buildTags returns build tags -func buildTags() string { - return *tags -} - -// Param is function parameter -type Param struct { - Name string - Type string -} - -// usage prints the program usage -func usage() { - fmt.Fprintf(os.Stderr, "usage: go run mksyscall_solaris.go [-b32 | -l32] [-tags x,y] [file ...]\n") - os.Exit(1) -} - -// parseParamList parses parameter list and returns a slice of parameters -func parseParamList(list string) []string { - list = strings.TrimSpace(list) - if list == "" { - return []string{} - } - return regexp.MustCompile(`\s*,\s*`).Split(list, -1) -} - -// parseParam splits a parameter into name and type -func parseParam(p string) Param { - ps := regexp.MustCompile(`^(\S*) (\S*)$`).FindStringSubmatch(p) - if ps == nil { - fmt.Fprintf(os.Stderr, "malformed parameter: %s\n", p) - os.Exit(1) - } - return Param{ps[1], ps[2]} -} - -func main() { - flag.Usage = usage - flag.Parse() - if len(flag.Args()) <= 0 { - fmt.Fprintf(os.Stderr, "no files to parse provided\n") - usage() - } - - endianness := "" - if *b32 { - endianness = "big-endian" - } else if *l32 { - endianness = "little-endian" - } - - pack := "" - text := "" - dynimports := "" - linknames := "" - var vars []string - for _, path := range flag.Args() { - file, err := os.Open(path) - if err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - s := bufio.NewScanner(file) - for s.Scan() { - t := s.Text() - t = strings.TrimSpace(t) - t = regexp.MustCompile(`\s+`).ReplaceAllString(t, ` `) - if p := regexp.MustCompile(`^package (\S+)$`).FindStringSubmatch(t); p != nil && pack == "" { - pack = p[1] - } - nonblock := regexp.MustCompile(`^\/\/sysnb `).FindStringSubmatch(t) - if regexp.MustCompile(`^\/\/sys `).FindStringSubmatch(t) == nil && nonblock == nil { - continue - } - - // Line must be of the form - // func Open(path string, mode int, perm int) (fd int, err error) - // Split into name, in params, out params. - f := regexp.MustCompile(`^\/\/sys(nb)? 
(\w+)\(([^()]*)\)\s*(?:\(([^()]+)\))?\s*(?:=\s*(?:(\w*)\.)?(\w*))?$`).FindStringSubmatch(t) - if f == nil { - fmt.Fprintf(os.Stderr, "%s:%s\nmalformed //sys declaration\n", path, t) - os.Exit(1) - } - funct, inps, outps, modname, sysname := f[2], f[3], f[4], f[5], f[6] - - // Split argument lists on comma. - in := parseParamList(inps) - out := parseParamList(outps) - - inps = strings.Join(in, ", ") - outps = strings.Join(out, ", ") - - // Try in vain to keep people from editing this file. - // The theory is that they jump into the middle of the file - // without reading the header. - text += "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT\n\n" - - // So file name. - if modname == "" { - modname = "libc" - } - - // System call name. - if sysname == "" { - sysname = funct - } - - // System call pointer variable name. - sysvarname := fmt.Sprintf("proc%s", sysname) - - strconvfunc := "BytePtrFromString" - strconvtype := "*byte" - - sysname = strings.ToLower(sysname) // All libc functions are lowercase. - - // Runtime import of function to allow cross-platform builds. - dynimports += fmt.Sprintf("//go:cgo_import_dynamic libc_%s %s \"%s.so\"\n", sysname, sysname, modname) - // Link symbol to proc address variable. - linknames += fmt.Sprintf("//go:linkname %s libc_%s\n", sysvarname, sysname) - // Library proc address variable. - vars = append(vars, sysvarname) - - // Go function header. - outlist := strings.Join(out, ", ") - if outlist != "" { - outlist = fmt.Sprintf(" (%s)", outlist) - } - if text != "" { - text += "\n" - } - text += fmt.Sprintf("func %s(%s)%s {\n", funct, strings.Join(in, ", "), outlist) - - // Check if err return available - errvar := "" - for _, param := range out { - p := parseParam(param) - if p.Type == "error" { - errvar = p.Name - continue - } - } - - // Prepare arguments to Syscall. - var args []string - n := 0 - for _, param := range in { - p := parseParam(param) - if regexp.MustCompile(`^\*`).FindStringSubmatch(p.Type) != nil { - args = append(args, "uintptr(unsafe.Pointer("+p.Name+"))") - } else if p.Type == "string" && errvar != "" { - text += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype) - text += fmt.Sprintf("\t_p%d, %s = %s(%s)\n", n, errvar, strconvfunc, p.Name) - text += fmt.Sprintf("\tif %s != nil {\n\t\treturn\n\t}\n", errvar) - args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n)) - n++ - } else if p.Type == "string" { - fmt.Fprintf(os.Stderr, path+":"+funct+" uses string arguments, but has no error return\n") - text += fmt.Sprintf("\tvar _p%d %s\n", n, strconvtype) - text += fmt.Sprintf("\t_p%d, _ = %s(%s)\n", n, strconvfunc, p.Name) - args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n)) - n++ - } else if s := regexp.MustCompile(`^\[\](.*)`).FindStringSubmatch(p.Type); s != nil { - // Convert slice into pointer, length. - // Have to be careful not to take address of &a[0] if len == 0: - // pass nil in that case. 
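To make the Solaris generation concrete (the lines that follow emit this pointer/length conversion and the sysvicall6 dispatch): for a single prototype, the generator assembles glue of roughly the following shape. Gethostname is an illustrative prototype, not copied from generated output; syscallFunc and sysvicall6 are internals of the generated package, and imports are omitted.

//go:cgo_import_dynamic libc_gethostname gethostname "libc.so"
//go:linkname procGethostname libc_gethostname

var procGethostname syscallFunc

func Gethostname(buf []byte) (err error) {
	// Slices become a pointer/length pair; nil is acceptable for an empty slice here.
	var _p0 *byte
	if len(buf) > 0 {
		_p0 = &buf[0]
	}
	_, _, e1 := sysvicall6(uintptr(unsafe.Pointer(&procGethostname)), 2, uintptr(unsafe.Pointer(_p0)), uintptr(len(buf)), 0, 0, 0, 0)
	if e1 != 0 {
		err = e1
	}
	return
}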
- text += fmt.Sprintf("\tvar _p%d *%s\n", n, s[1]) - text += fmt.Sprintf("\tif len(%s) > 0 {\n\t\t_p%d = &%s[0]\n\t}\n", p.Name, n, p.Name) - args = append(args, fmt.Sprintf("uintptr(unsafe.Pointer(_p%d))", n), fmt.Sprintf("uintptr(len(%s))", p.Name)) - n++ - } else if p.Type == "int64" && endianness != "" { - if endianness == "big-endian" { - args = append(args, fmt.Sprintf("uintptr(%s>>32)", p.Name), fmt.Sprintf("uintptr(%s)", p.Name)) - } else { - args = append(args, fmt.Sprintf("uintptr(%s)", p.Name), fmt.Sprintf("uintptr(%s>>32)", p.Name)) - } - } else if p.Type == "bool" { - text += fmt.Sprintf("\tvar _p%d uint32\n", n) - text += fmt.Sprintf("\tif %s {\n\t\t_p%d = 1\n\t} else {\n\t\t_p%d = 0\n\t}\n", p.Name, n, n) - args = append(args, fmt.Sprintf("uintptr(_p%d)", n)) - n++ - } else { - args = append(args, fmt.Sprintf("uintptr(%s)", p.Name)) - } - } - nargs := len(args) - - // Determine which form to use; pad args with zeros. - asm := "sysvicall6" - if nonblock != nil { - asm = "rawSysvicall6" - } - if len(args) <= 6 { - for len(args) < 6 { - args = append(args, "0") - } - } else { - fmt.Fprintf(os.Stderr, "%s: too many arguments to system call\n", path) - os.Exit(1) - } - - // Actual call. - arglist := strings.Join(args, ", ") - call := fmt.Sprintf("%s(uintptr(unsafe.Pointer(&%s)), %d, %s)", asm, sysvarname, nargs, arglist) - - // Assign return values. - body := "" - ret := []string{"_", "_", "_"} - doErrno := false - for i := 0; i < len(out); i++ { - p := parseParam(out[i]) - reg := "" - if p.Name == "err" { - reg = "e1" - ret[2] = reg - doErrno = true - } else { - reg = fmt.Sprintf("r%d", i) - ret[i] = reg - } - if p.Type == "bool" { - reg = fmt.Sprintf("%d != 0", reg) - } - if p.Type == "int64" && endianness != "" { - // 64-bit number in r1:r0 or r0:r1. - if i+2 > len(out) { - fmt.Fprintf(os.Stderr, "%s: not enough registers for int64 return\n", path) - os.Exit(1) - } - if endianness == "big-endian" { - reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i, i+1) - } else { - reg = fmt.Sprintf("int64(r%d)<<32 | int64(r%d)", i+1, i) - } - ret[i] = fmt.Sprintf("r%d", i) - ret[i+1] = fmt.Sprintf("r%d", i+1) - } - if reg != "e1" { - body += fmt.Sprintf("\t%s = %s(%s)\n", p.Name, p.Type, reg) - } - } - if ret[0] == "_" && ret[1] == "_" && ret[2] == "_" { - text += fmt.Sprintf("\t%s\n", call) - } else { - text += fmt.Sprintf("\t%s, %s, %s := %s\n", ret[0], ret[1], ret[2], call) - } - text += body - - if doErrno { - text += "\tif e1 != 0 {\n" - text += "\t\terr = e1\n" - text += "\t}\n" - } - text += "\treturn\n" - text += "}\n" - } - if err := s.Err(); err != nil { - fmt.Fprintf(os.Stderr, err.Error()) - os.Exit(1) - } - file.Close() - } - imp := "" - if pack != "unix" { - imp = "import \"golang.org/x/sys/unix\"\n" - - } - vardecls := "\t" + strings.Join(vars, ",\n\t") - vardecls += " syscallFunc" - fmt.Printf(srcTemplate, cmdLine(), buildTags(), pack, imp, dynimports, linknames, vardecls, text) -} - -const srcTemplate = `// %s -// Code generated by the command above; see README.md. DO NOT EDIT. - -// +build %s - -package %s - -import ( - "syscall" - "unsafe" -) -%s -%s -%s -var ( -%s -) - -%s -` diff --git a/vendor/golang.org/x/sys/unix/mksysctl_openbsd.go b/vendor/golang.org/x/sys/unix/mksysctl_openbsd.go deleted file mode 100644 index b6b409909c..0000000000 --- a/vendor/golang.org/x/sys/unix/mksysctl_openbsd.go +++ /dev/null @@ -1,355 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -// Parse the header files for OpenBSD and generate a Go usable sysctl MIB. -// -// Build a MIB with each entry being an array containing the level, type and -// a hash that will contain additional entries if the current entry is a node. -// We then walk this MIB and create a flattened sysctl name to OID hash. - -package main - -import ( - "bufio" - "fmt" - "os" - "path/filepath" - "regexp" - "sort" - "strings" -) - -var ( - goos, goarch string -) - -// cmdLine returns this programs's commandline arguments. -func cmdLine() string { - return "go run mksysctl_openbsd.go " + strings.Join(os.Args[1:], " ") -} - -// buildTags returns build tags. -func buildTags() string { - return fmt.Sprintf("%s,%s", goarch, goos) -} - -// reMatch performs regular expression match and stores the substring slice to value pointed by m. -func reMatch(re *regexp.Regexp, str string, m *[]string) bool { - *m = re.FindStringSubmatch(str) - if *m != nil { - return true - } - return false -} - -type nodeElement struct { - n int - t string - pE *map[string]nodeElement -} - -var ( - debugEnabled bool - mib map[string]nodeElement - node *map[string]nodeElement - nodeMap map[string]string - sysCtl []string -) - -var ( - ctlNames1RE = regexp.MustCompile(`^#define\s+(CTL_NAMES)\s+{`) - ctlNames2RE = regexp.MustCompile(`^#define\s+(CTL_(.*)_NAMES)\s+{`) - ctlNames3RE = regexp.MustCompile(`^#define\s+((.*)CTL_NAMES)\s+{`) - netInetRE = regexp.MustCompile(`^netinet/`) - netInet6RE = regexp.MustCompile(`^netinet6/`) - netRE = regexp.MustCompile(`^net/`) - bracesRE = regexp.MustCompile(`{.*}`) - ctlTypeRE = regexp.MustCompile(`{\s+"(\w+)",\s+(CTLTYPE_[A-Z]+)\s+}`) - fsNetKernRE = regexp.MustCompile(`^(fs|net|kern)_`) -) - -func debug(s string) { - if debugEnabled { - fmt.Fprintln(os.Stderr, s) - } -} - -// Walk the MIB and build a sysctl name to OID mapping. -func buildSysctl(pNode *map[string]nodeElement, name string, oid []int) { - lNode := pNode // local copy of pointer to node - var keys []string - for k := range *lNode { - keys = append(keys, k) - } - sort.Strings(keys) - - for _, key := range keys { - nodename := name - if name != "" { - nodename += "." 
- } - nodename += key - - nodeoid := append(oid, (*pNode)[key].n) - - if (*pNode)[key].t == `CTLTYPE_NODE` { - if _, ok := nodeMap[nodename]; ok { - lNode = &mib - ctlName := nodeMap[nodename] - for _, part := range strings.Split(ctlName, ".") { - lNode = ((*lNode)[part]).pE - } - } else { - lNode = (*pNode)[key].pE - } - buildSysctl(lNode, nodename, nodeoid) - } else if (*pNode)[key].t != "" { - oidStr := []string{} - for j := range nodeoid { - oidStr = append(oidStr, fmt.Sprintf("%d", nodeoid[j])) - } - text := "\t{ \"" + nodename + "\", []_C_int{ " + strings.Join(oidStr, ", ") + " } }, \n" - sysCtl = append(sysCtl, text) - } - } -} - -func main() { - // Get the OS (using GOOS_TARGET if it exist) - goos = os.Getenv("GOOS_TARGET") - if goos == "" { - goos = os.Getenv("GOOS") - } - // Get the architecture (using GOARCH_TARGET if it exists) - goarch = os.Getenv("GOARCH_TARGET") - if goarch == "" { - goarch = os.Getenv("GOARCH") - } - // Check if GOOS and GOARCH environment variables are defined - if goarch == "" || goos == "" { - fmt.Fprintf(os.Stderr, "GOARCH or GOOS not defined in environment\n") - os.Exit(1) - } - - mib = make(map[string]nodeElement) - headers := [...]string{ - `sys/sysctl.h`, - `sys/socket.h`, - `sys/tty.h`, - `sys/malloc.h`, - `sys/mount.h`, - `sys/namei.h`, - `sys/sem.h`, - `sys/shm.h`, - `sys/vmmeter.h`, - `uvm/uvmexp.h`, - `uvm/uvm_param.h`, - `uvm/uvm_swap_encrypt.h`, - `ddb/db_var.h`, - `net/if.h`, - `net/if_pfsync.h`, - `net/pipex.h`, - `netinet/in.h`, - `netinet/icmp_var.h`, - `netinet/igmp_var.h`, - `netinet/ip_ah.h`, - `netinet/ip_carp.h`, - `netinet/ip_divert.h`, - `netinet/ip_esp.h`, - `netinet/ip_ether.h`, - `netinet/ip_gre.h`, - `netinet/ip_ipcomp.h`, - `netinet/ip_ipip.h`, - `netinet/pim_var.h`, - `netinet/tcp_var.h`, - `netinet/udp_var.h`, - `netinet6/in6.h`, - `netinet6/ip6_divert.h`, - `netinet6/pim6_var.h`, - `netinet/icmp6.h`, - `netmpls/mpls.h`, - } - - ctls := [...]string{ - `kern`, - `vm`, - `fs`, - `net`, - //debug /* Special handling required */ - `hw`, - //machdep /* Arch specific */ - `user`, - `ddb`, - //vfs /* Special handling required */ - `fs.posix`, - `kern.forkstat`, - `kern.intrcnt`, - `kern.malloc`, - `kern.nchstats`, - `kern.seminfo`, - `kern.shminfo`, - `kern.timecounter`, - `kern.tty`, - `kern.watchdog`, - `net.bpf`, - `net.ifq`, - `net.inet`, - `net.inet.ah`, - `net.inet.carp`, - `net.inet.divert`, - `net.inet.esp`, - `net.inet.etherip`, - `net.inet.gre`, - `net.inet.icmp`, - `net.inet.igmp`, - `net.inet.ip`, - `net.inet.ip.ifq`, - `net.inet.ipcomp`, - `net.inet.ipip`, - `net.inet.mobileip`, - `net.inet.pfsync`, - `net.inet.pim`, - `net.inet.tcp`, - `net.inet.udp`, - `net.inet6`, - `net.inet6.divert`, - `net.inet6.ip6`, - `net.inet6.icmp6`, - `net.inet6.pim6`, - `net.inet6.tcp6`, - `net.inet6.udp6`, - `net.mpls`, - `net.mpls.ifq`, - `net.key`, - `net.pflow`, - `net.pfsync`, - `net.pipex`, - `net.rt`, - `vm.swapencrypt`, - //vfsgenctl /* Special handling required */ - } - - // Node name "fixups" - ctlMap := map[string]string{ - "ipproto": "net.inet", - "net.inet.ipproto": "net.inet", - "net.inet6.ipv6proto": "net.inet6", - "net.inet6.ipv6": "net.inet6.ip6", - "net.inet.icmpv6": "net.inet6.icmp6", - "net.inet6.divert6": "net.inet6.divert", - "net.inet6.tcp6": "net.inet.tcp", - "net.inet6.udp6": "net.inet.udp", - "mpls": "net.mpls", - "swpenc": "vm.swapencrypt", - } - - // Node mappings - nodeMap = map[string]string{ - "net.inet.ip.ifq": "net.ifq", - "net.inet.pfsync": "net.pfsync", - "net.mpls.ifq": "net.ifq", - } - - mCtls := 
make(map[string]bool) - for _, ctl := range ctls { - mCtls[ctl] = true - } - - for _, header := range headers { - debug("Processing " + header) - file, err := os.Open(filepath.Join("/usr/include", header)) - if err != nil { - fmt.Fprintf(os.Stderr, "%v\n", err) - os.Exit(1) - } - s := bufio.NewScanner(file) - for s.Scan() { - var sub []string - if reMatch(ctlNames1RE, s.Text(), &sub) || - reMatch(ctlNames2RE, s.Text(), &sub) || - reMatch(ctlNames3RE, s.Text(), &sub) { - if sub[1] == `CTL_NAMES` { - // Top level. - node = &mib - } else { - // Node. - nodename := strings.ToLower(sub[2]) - ctlName := "" - if reMatch(netInetRE, header, &sub) { - ctlName = "net.inet." + nodename - } else if reMatch(netInet6RE, header, &sub) { - ctlName = "net.inet6." + nodename - } else if reMatch(netRE, header, &sub) { - ctlName = "net." + nodename - } else { - ctlName = nodename - ctlName = fsNetKernRE.ReplaceAllString(ctlName, `$1.`) - } - - if val, ok := ctlMap[ctlName]; ok { - ctlName = val - } - if _, ok := mCtls[ctlName]; !ok { - debug("Ignoring " + ctlName + "...") - continue - } - - // Walk down from the top of the MIB. - node = &mib - for _, part := range strings.Split(ctlName, ".") { - if _, ok := (*node)[part]; !ok { - debug("Missing node " + part) - (*node)[part] = nodeElement{n: 0, t: "", pE: &map[string]nodeElement{}} - } - node = (*node)[part].pE - } - } - - // Populate current node with entries. - i := -1 - for !strings.HasPrefix(s.Text(), "}") { - s.Scan() - if reMatch(bracesRE, s.Text(), &sub) { - i++ - } - if !reMatch(ctlTypeRE, s.Text(), &sub) { - continue - } - (*node)[sub[1]] = nodeElement{n: i, t: sub[2], pE: &map[string]nodeElement{}} - } - } - } - err = s.Err() - if err != nil { - fmt.Fprintf(os.Stderr, "%v\n", err) - os.Exit(1) - } - file.Close() - } - buildSysctl(&mib, "", []int{}) - - sort.Strings(sysCtl) - text := strings.Join(sysCtl, "") - - fmt.Printf(srcTemplate, cmdLine(), buildTags(), text) -} - -const srcTemplate = `// %s -// Code generated by the command above; DO NOT EDIT. - -// +build %s - -package unix - -type mibentry struct { - ctlname string - ctloid []_C_int -} - -var sysctlMib = []mibentry { -%s -} -` diff --git a/vendor/golang.org/x/sys/unix/mksysnum.go b/vendor/golang.org/x/sys/unix/mksysnum.go deleted file mode 100644 index baa6ecd850..0000000000 --- a/vendor/golang.org/x/sys/unix/mksysnum.go +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -// Generate system call table for DragonFly, NetBSD, -// FreeBSD, OpenBSD or Darwin from master list -// (for example, /usr/src/sys/kern/syscalls.master or -// sys/syscall.h). 
-package main - -import ( - "bufio" - "fmt" - "io" - "io/ioutil" - "net/http" - "os" - "regexp" - "strings" -) - -var ( - goos, goarch string -) - -// cmdLine returns this programs's commandline arguments -func cmdLine() string { - return "go run mksysnum.go " + strings.Join(os.Args[1:], " ") -} - -// buildTags returns build tags -func buildTags() string { - return fmt.Sprintf("%s,%s", goarch, goos) -} - -func checkErr(err error) { - if err != nil { - fmt.Fprintf(os.Stderr, "%v\n", err) - os.Exit(1) - } -} - -// source string and substring slice for regexp -type re struct { - str string // source string - sub []string // matched sub-string -} - -// Match performs regular expression match -func (r *re) Match(exp string) bool { - r.sub = regexp.MustCompile(exp).FindStringSubmatch(r.str) - if r.sub != nil { - return true - } - return false -} - -// fetchFile fetches a text file from URL -func fetchFile(URL string) io.Reader { - resp, err := http.Get(URL) - checkErr(err) - defer resp.Body.Close() - body, err := ioutil.ReadAll(resp.Body) - checkErr(err) - return strings.NewReader(string(body)) -} - -// readFile reads a text file from path -func readFile(path string) io.Reader { - file, err := os.Open(os.Args[1]) - checkErr(err) - return file -} - -func format(name, num, proto string) string { - name = strings.ToUpper(name) - // There are multiple entries for enosys and nosys, so comment them out. - nm := re{str: name} - if nm.Match(`^SYS_E?NOSYS$`) { - name = fmt.Sprintf("// %s", name) - } - if name == `SYS_SYS_EXIT` { - name = `SYS_EXIT` - } - return fmt.Sprintf(" %s = %s; // %s\n", name, num, proto) -} - -func main() { - // Get the OS (using GOOS_TARGET if it exist) - goos = os.Getenv("GOOS_TARGET") - if goos == "" { - goos = os.Getenv("GOOS") - } - // Get the architecture (using GOARCH_TARGET if it exists) - goarch = os.Getenv("GOARCH_TARGET") - if goarch == "" { - goarch = os.Getenv("GOARCH") - } - // Check if GOOS and GOARCH environment variables are defined - if goarch == "" || goos == "" { - fmt.Fprintf(os.Stderr, "GOARCH or GOOS not defined in environment\n") - os.Exit(1) - } - - file := strings.TrimSpace(os.Args[1]) - var syscalls io.Reader - if strings.HasPrefix(file, "https://") || strings.HasPrefix(file, "http://") { - // Download syscalls.master file - syscalls = fetchFile(file) - } else { - syscalls = readFile(file) - } - - var text, line string - s := bufio.NewScanner(syscalls) - for s.Scan() { - t := re{str: line} - if t.Match(`^(.*)\\$`) { - // Handle continuation - line = t.sub[1] - line += strings.TrimLeft(s.Text(), " \t") - } else { - // New line - line = s.Text() - } - t = re{str: line} - if t.Match(`\\$`) { - continue - } - t = re{str: line} - - switch goos { - case "dragonfly": - if t.Match(`^([0-9]+)\s+STD\s+({ \S+\s+(\w+).*)$`) { - num, proto := t.sub[1], t.sub[2] - name := fmt.Sprintf("SYS_%s", t.sub[3]) - text += format(name, num, proto) - } - case "freebsd": - if t.Match(`^([0-9]+)\s+\S+\s+(?:(?:NO)?STD|COMPAT10)\s+({ \S+\s+(\w+).*)$`) { - num, proto := t.sub[1], t.sub[2] - name := fmt.Sprintf("SYS_%s", t.sub[3]) - text += format(name, num, proto) - } - case "openbsd": - if t.Match(`^([0-9]+)\s+STD\s+(NOLOCK\s+)?({ \S+\s+\*?(\w+).*)$`) { - num, proto, name := t.sub[1], t.sub[3], t.sub[4] - text += format(name, num, proto) - } - case "netbsd": - if t.Match(`^([0-9]+)\s+((STD)|(NOERR))\s+(RUMP\s+)?({\s+\S+\s*\*?\s*\|(\S+)\|(\S*)\|(\w+).*\s+})(\s+(\S+))?$`) { - num, proto, compat := t.sub[1], t.sub[6], t.sub[8] - name := t.sub[7] + "_" + t.sub[9] - if t.sub[11] != "" { 
- name = t.sub[7] + "_" + t.sub[11] - } - name = strings.ToUpper(name) - if compat == "" || compat == "13" || compat == "30" || compat == "50" { - text += fmt.Sprintf(" %s = %s; // %s\n", name, num, proto) - } - } - case "darwin": - if t.Match(`^#define\s+SYS_(\w+)\s+([0-9]+)`) { - name, num := t.sub[1], t.sub[2] - name = strings.ToUpper(name) - text += fmt.Sprintf(" SYS_%s = %s;\n", name, num) - } - default: - fmt.Fprintf(os.Stderr, "unrecognized GOOS=%s\n", goos) - os.Exit(1) - - } - } - err := s.Err() - checkErr(err) - - fmt.Printf(template, cmdLine(), buildTags(), text) -} - -const template = `// %s -// Code generated by the command above; see README.md. DO NOT EDIT. - -// +build %s - -package unix - -const( -%s)` diff --git a/vendor/golang.org/x/sys/unix/types_aix.go b/vendor/golang.org/x/sys/unix/types_aix.go deleted file mode 100644 index 40d2beede5..0000000000 --- a/vendor/golang.org/x/sys/unix/types_aix.go +++ /dev/null @@ -1,237 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore -// +build aix - -/* -Input to cgo -godefs. See also mkerrors.sh and mkall.sh -*/ - -// +godefs map struct_in_addr [4]byte /* in_addr */ -// +godefs map struct_in6_addr [16]byte /* in6_addr */ - -package unix - -/* -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -#include -#include -#include -#include - - -#include -#include - -enum { - sizeofPtr = sizeof(void*), -}; - -union sockaddr_all { - struct sockaddr s1; // this one gets used for fields - struct sockaddr_in s2; // these pad it out - struct sockaddr_in6 s3; - struct sockaddr_un s4; - struct sockaddr_dl s5; -}; - -struct sockaddr_any { - struct sockaddr addr; - char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)]; -}; - -*/ -import "C" - -// Machine characteristics - -const ( - SizeofPtr = C.sizeofPtr - SizeofShort = C.sizeof_short - SizeofInt = C.sizeof_int - SizeofLong = C.sizeof_long - SizeofLongLong = C.sizeof_longlong - PathMax = C.PATH_MAX -) - -// Basic types - -type ( - _C_short C.short - _C_int C.int - _C_long C.long - _C_long_long C.longlong -) - -type off64 C.off64_t -type off C.off_t -type Mode_t C.mode_t - -// Time - -type Timespec C.struct_timespec - -type Timeval C.struct_timeval - -type Timeval32 C.struct_timeval32 - -type Timex C.struct_timex - -type Time_t C.time_t - -type Tms C.struct_tms - -type Utimbuf C.struct_utimbuf - -type Timezone C.struct_timezone - -// Processes - -type Rusage C.struct_rusage - -type Rlimit C.struct_rlimit64 - -type Pid_t C.pid_t - -type _Gid_t C.gid_t - -type dev_t C.dev_t - -// Files - -type Stat_t C.struct_stat - -type StatxTimestamp C.struct_statx_timestamp - -type Statx_t C.struct_statx - -type Dirent C.struct_dirent - -// Sockets - -type RawSockaddrInet4 C.struct_sockaddr_in - -type RawSockaddrInet6 C.struct_sockaddr_in6 - -type RawSockaddrUnix C.struct_sockaddr_un - -type RawSockaddrDatalink C.struct_sockaddr_dl - -type RawSockaddr C.struct_sockaddr - -type RawSockaddrAny C.struct_sockaddr_any - -type _Socklen C.socklen_t - -type Cmsghdr C.struct_cmsghdr - -type ICMPv6Filter C.struct_icmp6_filter - -type Iovec C.struct_iovec - -type IPMreq C.struct_ip_mreq - -type IPv6Mreq C.struct_ipv6_mreq - -type IPv6MTUInfo C.struct_ip6_mtuinfo - -type Linger C.struct_linger - -type Msghdr C.struct_msghdr - -const ( - SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in - 
SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6 - SizeofSockaddrAny = C.sizeof_struct_sockaddr_any - SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un - SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl - SizeofLinger = C.sizeof_struct_linger - SizeofIPMreq = C.sizeof_struct_ip_mreq - SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq - SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo - SizeofMsghdr = C.sizeof_struct_msghdr - SizeofCmsghdr = C.sizeof_struct_cmsghdr - SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter -) - -// Routing and interface messages - -const ( - SizeofIfMsghdr = C.sizeof_struct_if_msghdr -) - -type IfMsgHdr C.struct_if_msghdr - -// Misc - -type FdSet C.fd_set - -type Utsname C.struct_utsname - -type Ustat_t C.struct_ustat - -type Sigset_t C.sigset_t - -const ( - AT_FDCWD = C.AT_FDCWD - AT_REMOVEDIR = C.AT_REMOVEDIR - AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW -) - -// Terminal handling - -type Termios C.struct_termios - -type Termio C.struct_termio - -type Winsize C.struct_winsize - -//poll - -type PollFd struct { - Fd int32 - Events uint16 - Revents uint16 -} - -const ( - POLLERR = C.POLLERR - POLLHUP = C.POLLHUP - POLLIN = C.POLLIN - POLLNVAL = C.POLLNVAL - POLLOUT = C.POLLOUT - POLLPRI = C.POLLPRI - POLLRDBAND = C.POLLRDBAND - POLLRDNORM = C.POLLRDNORM - POLLWRBAND = C.POLLWRBAND - POLLWRNORM = C.POLLWRNORM -) - -//flock_t - -type Flock_t C.struct_flock64 - -// Statfs - -type Fsid_t C.struct_fsid_t -type Fsid64_t C.struct_fsid64_t - -type Statfs_t C.struct_statfs - -const RNDGETENTCNT = 0x80045200 diff --git a/vendor/golang.org/x/sys/unix/types_darwin.go b/vendor/golang.org/x/sys/unix/types_darwin.go deleted file mode 100644 index 155c2e692b..0000000000 --- a/vendor/golang.org/x/sys/unix/types_darwin.go +++ /dev/null @@ -1,283 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -/* -Input to cgo -godefs. 
See README.md -*/ - -// +godefs map struct_in_addr [4]byte /* in_addr */ -// +godefs map struct_in6_addr [16]byte /* in6_addr */ - -package unix - -/* -#define __DARWIN_UNIX03 0 -#define KERNEL -#define _DARWIN_USE_64_BIT_INODE -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -enum { - sizeofPtr = sizeof(void*), -}; - -union sockaddr_all { - struct sockaddr s1; // this one gets used for fields - struct sockaddr_in s2; // these pad it out - struct sockaddr_in6 s3; - struct sockaddr_un s4; - struct sockaddr_dl s5; -}; - -struct sockaddr_any { - struct sockaddr addr; - char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)]; -}; - -*/ -import "C" - -// Machine characteristics - -const ( - SizeofPtr = C.sizeofPtr - SizeofShort = C.sizeof_short - SizeofInt = C.sizeof_int - SizeofLong = C.sizeof_long - SizeofLongLong = C.sizeof_longlong -) - -// Basic types - -type ( - _C_short C.short - _C_int C.int - _C_long C.long - _C_long_long C.longlong -) - -// Time - -type Timespec C.struct_timespec - -type Timeval C.struct_timeval - -type Timeval32 C.struct_timeval32 - -// Processes - -type Rusage C.struct_rusage - -type Rlimit C.struct_rlimit - -type _Gid_t C.gid_t - -// Files - -type Stat_t C.struct_stat64 - -type Statfs_t C.struct_statfs64 - -type Flock_t C.struct_flock - -type Fstore_t C.struct_fstore - -type Radvisory_t C.struct_radvisory - -type Fbootstraptransfer_t C.struct_fbootstraptransfer - -type Log2phys_t C.struct_log2phys - -type Fsid C.struct_fsid - -type Dirent C.struct_dirent - -// Sockets - -type RawSockaddrInet4 C.struct_sockaddr_in - -type RawSockaddrInet6 C.struct_sockaddr_in6 - -type RawSockaddrUnix C.struct_sockaddr_un - -type RawSockaddrDatalink C.struct_sockaddr_dl - -type RawSockaddr C.struct_sockaddr - -type RawSockaddrAny C.struct_sockaddr_any - -type _Socklen C.socklen_t - -type Linger C.struct_linger - -type Iovec C.struct_iovec - -type IPMreq C.struct_ip_mreq - -type IPv6Mreq C.struct_ipv6_mreq - -type Msghdr C.struct_msghdr - -type Cmsghdr C.struct_cmsghdr - -type Inet4Pktinfo C.struct_in_pktinfo - -type Inet6Pktinfo C.struct_in6_pktinfo - -type IPv6MTUInfo C.struct_ip6_mtuinfo - -type ICMPv6Filter C.struct_icmp6_filter - -const ( - SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in - SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6 - SizeofSockaddrAny = C.sizeof_struct_sockaddr_any - SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un - SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl - SizeofLinger = C.sizeof_struct_linger - SizeofIPMreq = C.sizeof_struct_ip_mreq - SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq - SizeofMsghdr = C.sizeof_struct_msghdr - SizeofCmsghdr = C.sizeof_struct_cmsghdr - SizeofInet4Pktinfo = C.sizeof_struct_in_pktinfo - SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo - SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo - SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter -) - -// Ptrace requests - -const ( - PTRACE_TRACEME = C.PT_TRACE_ME - PTRACE_CONT = C.PT_CONTINUE - PTRACE_KILL = C.PT_KILL -) - -// Events (kqueue, kevent) - -type Kevent_t C.struct_kevent - -// Select - -type FdSet C.fd_set - -// Routing and interface messages - -const ( - SizeofIfMsghdr = C.sizeof_struct_if_msghdr - SizeofIfData = C.sizeof_struct_if_data - SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr - 
SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr - SizeofIfmaMsghdr2 = C.sizeof_struct_ifma_msghdr2 - SizeofRtMsghdr = C.sizeof_struct_rt_msghdr - SizeofRtMetrics = C.sizeof_struct_rt_metrics -) - -type IfMsghdr C.struct_if_msghdr - -type IfData C.struct_if_data - -type IfaMsghdr C.struct_ifa_msghdr - -type IfmaMsghdr C.struct_ifma_msghdr - -type IfmaMsghdr2 C.struct_ifma_msghdr2 - -type RtMsghdr C.struct_rt_msghdr - -type RtMetrics C.struct_rt_metrics - -// Berkeley packet filter - -const ( - SizeofBpfVersion = C.sizeof_struct_bpf_version - SizeofBpfStat = C.sizeof_struct_bpf_stat - SizeofBpfProgram = C.sizeof_struct_bpf_program - SizeofBpfInsn = C.sizeof_struct_bpf_insn - SizeofBpfHdr = C.sizeof_struct_bpf_hdr -) - -type BpfVersion C.struct_bpf_version - -type BpfStat C.struct_bpf_stat - -type BpfProgram C.struct_bpf_program - -type BpfInsn C.struct_bpf_insn - -type BpfHdr C.struct_bpf_hdr - -// Terminal handling - -type Termios C.struct_termios - -type Winsize C.struct_winsize - -// fchmodat-like syscalls. - -const ( - AT_FDCWD = C.AT_FDCWD - AT_REMOVEDIR = C.AT_REMOVEDIR - AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW - AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW -) - -// poll - -type PollFd C.struct_pollfd - -const ( - POLLERR = C.POLLERR - POLLHUP = C.POLLHUP - POLLIN = C.POLLIN - POLLNVAL = C.POLLNVAL - POLLOUT = C.POLLOUT - POLLPRI = C.POLLPRI - POLLRDBAND = C.POLLRDBAND - POLLRDNORM = C.POLLRDNORM - POLLWRBAND = C.POLLWRBAND - POLLWRNORM = C.POLLWRNORM -) - -// uname - -type Utsname C.struct_utsname - -// Clockinfo - -const SizeofClockinfo = C.sizeof_struct_clockinfo - -type Clockinfo C.struct_clockinfo diff --git a/vendor/golang.org/x/sys/unix/types_dragonfly.go b/vendor/golang.org/x/sys/unix/types_dragonfly.go deleted file mode 100644 index 3365dd79d0..0000000000 --- a/vendor/golang.org/x/sys/unix/types_dragonfly.go +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -/* -Input to cgo -godefs. 
See README.md -*/ - -// +godefs map struct_in_addr [4]byte /* in_addr */ -// +godefs map struct_in6_addr [16]byte /* in6_addr */ - -package unix - -/* -#define KERNEL -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -enum { - sizeofPtr = sizeof(void*), -}; - -union sockaddr_all { - struct sockaddr s1; // this one gets used for fields - struct sockaddr_in s2; // these pad it out - struct sockaddr_in6 s3; - struct sockaddr_un s4; - struct sockaddr_dl s5; -}; - -struct sockaddr_any { - struct sockaddr addr; - char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)]; -}; - -*/ -import "C" - -// Machine characteristics - -const ( - SizeofPtr = C.sizeofPtr - SizeofShort = C.sizeof_short - SizeofInt = C.sizeof_int - SizeofLong = C.sizeof_long - SizeofLongLong = C.sizeof_longlong -) - -// Basic types - -type ( - _C_short C.short - _C_int C.int - _C_long C.long - _C_long_long C.longlong -) - -// Time - -type Timespec C.struct_timespec - -type Timeval C.struct_timeval - -// Processes - -type Rusage C.struct_rusage - -type Rlimit C.struct_rlimit - -type _Gid_t C.gid_t - -// Files - -type Stat_t C.struct_stat - -type Statfs_t C.struct_statfs - -type Flock_t C.struct_flock - -type Dirent C.struct_dirent - -type Fsid C.struct_fsid - -// File system limits - -const ( - PathMax = C.PATH_MAX -) - -// Sockets - -type RawSockaddrInet4 C.struct_sockaddr_in - -type RawSockaddrInet6 C.struct_sockaddr_in6 - -type RawSockaddrUnix C.struct_sockaddr_un - -type RawSockaddrDatalink C.struct_sockaddr_dl - -type RawSockaddr C.struct_sockaddr - -type RawSockaddrAny C.struct_sockaddr_any - -type _Socklen C.socklen_t - -type Linger C.struct_linger - -type Iovec C.struct_iovec - -type IPMreq C.struct_ip_mreq - -type IPv6Mreq C.struct_ipv6_mreq - -type Msghdr C.struct_msghdr - -type Cmsghdr C.struct_cmsghdr - -type Inet6Pktinfo C.struct_in6_pktinfo - -type IPv6MTUInfo C.struct_ip6_mtuinfo - -type ICMPv6Filter C.struct_icmp6_filter - -const ( - SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in - SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6 - SizeofSockaddrAny = C.sizeof_struct_sockaddr_any - SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un - SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl - SizeofLinger = C.sizeof_struct_linger - SizeofIPMreq = C.sizeof_struct_ip_mreq - SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq - SizeofMsghdr = C.sizeof_struct_msghdr - SizeofCmsghdr = C.sizeof_struct_cmsghdr - SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo - SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo - SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter -) - -// Ptrace requests - -const ( - PTRACE_TRACEME = C.PT_TRACE_ME - PTRACE_CONT = C.PT_CONTINUE - PTRACE_KILL = C.PT_KILL -) - -// Events (kqueue, kevent) - -type Kevent_t C.struct_kevent - -// Select - -type FdSet C.fd_set - -// Routing and interface messages - -const ( - SizeofIfMsghdr = C.sizeof_struct_if_msghdr - SizeofIfData = C.sizeof_struct_if_data - SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr - SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr - SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr - SizeofRtMsghdr = C.sizeof_struct_rt_msghdr - SizeofRtMetrics = C.sizeof_struct_rt_metrics -) - -type IfMsghdr C.struct_if_msghdr - -type IfData C.struct_if_data - -type IfaMsghdr C.struct_ifa_msghdr - -type IfmaMsghdr 
C.struct_ifma_msghdr - -type IfAnnounceMsghdr C.struct_if_announcemsghdr - -type RtMsghdr C.struct_rt_msghdr - -type RtMetrics C.struct_rt_metrics - -// Berkeley packet filter - -const ( - SizeofBpfVersion = C.sizeof_struct_bpf_version - SizeofBpfStat = C.sizeof_struct_bpf_stat - SizeofBpfProgram = C.sizeof_struct_bpf_program - SizeofBpfInsn = C.sizeof_struct_bpf_insn - SizeofBpfHdr = C.sizeof_struct_bpf_hdr -) - -type BpfVersion C.struct_bpf_version - -type BpfStat C.struct_bpf_stat - -type BpfProgram C.struct_bpf_program - -type BpfInsn C.struct_bpf_insn - -type BpfHdr C.struct_bpf_hdr - -// Terminal handling - -type Termios C.struct_termios - -type Winsize C.struct_winsize - -// fchmodat-like syscalls. - -const ( - AT_FDCWD = C.AT_FDCWD - AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW -) - -// poll - -type PollFd C.struct_pollfd - -const ( - POLLERR = C.POLLERR - POLLHUP = C.POLLHUP - POLLIN = C.POLLIN - POLLNVAL = C.POLLNVAL - POLLOUT = C.POLLOUT - POLLPRI = C.POLLPRI - POLLRDBAND = C.POLLRDBAND - POLLRDNORM = C.POLLRDNORM - POLLWRBAND = C.POLLWRBAND - POLLWRNORM = C.POLLWRNORM -) - -// Uname - -type Utsname C.struct_utsname diff --git a/vendor/golang.org/x/sys/unix/types_freebsd.go b/vendor/golang.org/x/sys/unix/types_freebsd.go deleted file mode 100644 index a121dc3368..0000000000 --- a/vendor/golang.org/x/sys/unix/types_freebsd.go +++ /dev/null @@ -1,400 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -/* -Input to cgo -godefs. See README.md -*/ - -// +godefs map struct_in_addr [4]byte /* in_addr */ -// +godefs map struct_in6_addr [16]byte /* in6_addr */ - -package unix - -/* -#define _WANT_FREEBSD11_STAT 1 -#define _WANT_FREEBSD11_STATFS 1 -#define _WANT_FREEBSD11_DIRENT 1 -#define _WANT_FREEBSD11_KEVENT 1 - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -enum { - sizeofPtr = sizeof(void*), -}; - -union sockaddr_all { - struct sockaddr s1; // this one gets used for fields - struct sockaddr_in s2; // these pad it out - struct sockaddr_in6 s3; - struct sockaddr_un s4; - struct sockaddr_dl s5; -}; - -struct sockaddr_any { - struct sockaddr addr; - char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)]; -}; - -// This structure is a duplicate of if_data on FreeBSD 8-STABLE. -// See /usr/include/net/if.h. -struct if_data8 { - u_char ifi_type; - u_char ifi_physical; - u_char ifi_addrlen; - u_char ifi_hdrlen; - u_char ifi_link_state; - u_char ifi_spare_char1; - u_char ifi_spare_char2; - u_char ifi_datalen; - u_long ifi_mtu; - u_long ifi_metric; - u_long ifi_baudrate; - u_long ifi_ipackets; - u_long ifi_ierrors; - u_long ifi_opackets; - u_long ifi_oerrors; - u_long ifi_collisions; - u_long ifi_ibytes; - u_long ifi_obytes; - u_long ifi_imcasts; - u_long ifi_omcasts; - u_long ifi_iqdrops; - u_long ifi_noproto; - u_long ifi_hwassist; -// FIXME: these are now unions, so maybe need to change definitions? -#undef ifi_epoch - time_t ifi_epoch; -#undef ifi_lastchange - struct timeval ifi_lastchange; -}; - -// This structure is a duplicate of if_msghdr on FreeBSD 8-STABLE. -// See /usr/include/net/if.h. 
-struct if_msghdr8 { - u_short ifm_msglen; - u_char ifm_version; - u_char ifm_type; - int ifm_addrs; - int ifm_flags; - u_short ifm_index; - struct if_data8 ifm_data; -}; -*/ -import "C" - -// Machine characteristics - -const ( - SizeofPtr = C.sizeofPtr - SizeofShort = C.sizeof_short - SizeofInt = C.sizeof_int - SizeofLong = C.sizeof_long - SizeofLongLong = C.sizeof_longlong -) - -// Basic types - -type ( - _C_short C.short - _C_int C.int - _C_long C.long - _C_long_long C.longlong -) - -// Time - -type Timespec C.struct_timespec - -type Timeval C.struct_timeval - -// Processes - -type Rusage C.struct_rusage - -type Rlimit C.struct_rlimit - -type _Gid_t C.gid_t - -// Files - -const ( - _statfsVersion = C.STATFS_VERSION - _dirblksiz = C.DIRBLKSIZ -) - -type Stat_t C.struct_stat - -type stat_freebsd11_t C.struct_freebsd11_stat - -type Statfs_t C.struct_statfs - -type statfs_freebsd11_t C.struct_freebsd11_statfs - -type Flock_t C.struct_flock - -type Dirent C.struct_dirent - -type dirent_freebsd11 C.struct_freebsd11_dirent - -type Fsid C.struct_fsid - -// File system limits - -const ( - PathMax = C.PATH_MAX -) - -// Advice to Fadvise - -const ( - FADV_NORMAL = C.POSIX_FADV_NORMAL - FADV_RANDOM = C.POSIX_FADV_RANDOM - FADV_SEQUENTIAL = C.POSIX_FADV_SEQUENTIAL - FADV_WILLNEED = C.POSIX_FADV_WILLNEED - FADV_DONTNEED = C.POSIX_FADV_DONTNEED - FADV_NOREUSE = C.POSIX_FADV_NOREUSE -) - -// Sockets - -type RawSockaddrInet4 C.struct_sockaddr_in - -type RawSockaddrInet6 C.struct_sockaddr_in6 - -type RawSockaddrUnix C.struct_sockaddr_un - -type RawSockaddrDatalink C.struct_sockaddr_dl - -type RawSockaddr C.struct_sockaddr - -type RawSockaddrAny C.struct_sockaddr_any - -type _Socklen C.socklen_t - -type Linger C.struct_linger - -type Iovec C.struct_iovec - -type IPMreq C.struct_ip_mreq - -type IPMreqn C.struct_ip_mreqn - -type IPv6Mreq C.struct_ipv6_mreq - -type Msghdr C.struct_msghdr - -type Cmsghdr C.struct_cmsghdr - -type Inet6Pktinfo C.struct_in6_pktinfo - -type IPv6MTUInfo C.struct_ip6_mtuinfo - -type ICMPv6Filter C.struct_icmp6_filter - -const ( - SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in - SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6 - SizeofSockaddrAny = C.sizeof_struct_sockaddr_any - SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un - SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl - SizeofLinger = C.sizeof_struct_linger - SizeofIPMreq = C.sizeof_struct_ip_mreq - SizeofIPMreqn = C.sizeof_struct_ip_mreqn - SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq - SizeofMsghdr = C.sizeof_struct_msghdr - SizeofCmsghdr = C.sizeof_struct_cmsghdr - SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo - SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo - SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter -) - -// Ptrace requests - -const ( - PTRACE_ATTACH = C.PT_ATTACH - PTRACE_CONT = C.PT_CONTINUE - PTRACE_DETACH = C.PT_DETACH - PTRACE_GETFPREGS = C.PT_GETFPREGS - PTRACE_GETFSBASE = C.PT_GETFSBASE - PTRACE_GETLWPLIST = C.PT_GETLWPLIST - PTRACE_GETNUMLWPS = C.PT_GETNUMLWPS - PTRACE_GETREGS = C.PT_GETREGS - PTRACE_GETXSTATE = C.PT_GETXSTATE - PTRACE_IO = C.PT_IO - PTRACE_KILL = C.PT_KILL - PTRACE_LWPEVENTS = C.PT_LWP_EVENTS - PTRACE_LWPINFO = C.PT_LWPINFO - PTRACE_SETFPREGS = C.PT_SETFPREGS - PTRACE_SETREGS = C.PT_SETREGS - PTRACE_SINGLESTEP = C.PT_STEP - PTRACE_TRACEME = C.PT_TRACE_ME -) - -const ( - PIOD_READ_D = C.PIOD_READ_D - PIOD_WRITE_D = C.PIOD_WRITE_D - PIOD_READ_I = C.PIOD_READ_I - PIOD_WRITE_I = C.PIOD_WRITE_I -) - -const ( - PL_FLAG_BORN = C.PL_FLAG_BORN - PL_FLAG_EXITED = C.PL_FLAG_EXITED - 
PL_FLAG_SI = C.PL_FLAG_SI -) - -const ( - TRAP_BRKPT = C.TRAP_BRKPT - TRAP_TRACE = C.TRAP_TRACE -) - -type PtraceLwpInfoStruct C.struct_ptrace_lwpinfo - -type __Siginfo C.struct___siginfo - -type Sigset_t C.sigset_t - -type Reg C.struct_reg - -type FpReg C.struct_fpreg - -type PtraceIoDesc C.struct_ptrace_io_desc - -// Events (kqueue, kevent) - -type Kevent_t C.struct_kevent_freebsd11 - -// Select - -type FdSet C.fd_set - -// Routing and interface messages - -const ( - sizeofIfMsghdr = C.sizeof_struct_if_msghdr - SizeofIfMsghdr = C.sizeof_struct_if_msghdr8 - sizeofIfData = C.sizeof_struct_if_data - SizeofIfData = C.sizeof_struct_if_data8 - SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr - SizeofIfmaMsghdr = C.sizeof_struct_ifma_msghdr - SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr - SizeofRtMsghdr = C.sizeof_struct_rt_msghdr - SizeofRtMetrics = C.sizeof_struct_rt_metrics -) - -type ifMsghdr C.struct_if_msghdr - -type IfMsghdr C.struct_if_msghdr8 - -type ifData C.struct_if_data - -type IfData C.struct_if_data8 - -type IfaMsghdr C.struct_ifa_msghdr - -type IfmaMsghdr C.struct_ifma_msghdr - -type IfAnnounceMsghdr C.struct_if_announcemsghdr - -type RtMsghdr C.struct_rt_msghdr - -type RtMetrics C.struct_rt_metrics - -// Berkeley packet filter - -const ( - SizeofBpfVersion = C.sizeof_struct_bpf_version - SizeofBpfStat = C.sizeof_struct_bpf_stat - SizeofBpfZbuf = C.sizeof_struct_bpf_zbuf - SizeofBpfProgram = C.sizeof_struct_bpf_program - SizeofBpfInsn = C.sizeof_struct_bpf_insn - SizeofBpfHdr = C.sizeof_struct_bpf_hdr - SizeofBpfZbufHeader = C.sizeof_struct_bpf_zbuf_header -) - -type BpfVersion C.struct_bpf_version - -type BpfStat C.struct_bpf_stat - -type BpfZbuf C.struct_bpf_zbuf - -type BpfProgram C.struct_bpf_program - -type BpfInsn C.struct_bpf_insn - -type BpfHdr C.struct_bpf_hdr - -type BpfZbufHeader C.struct_bpf_zbuf_header - -// Terminal handling - -type Termios C.struct_termios - -type Winsize C.struct_winsize - -// fchmodat-like syscalls. - -const ( - AT_FDCWD = C.AT_FDCWD - AT_REMOVEDIR = C.AT_REMOVEDIR - AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW - AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW -) - -// poll - -type PollFd C.struct_pollfd - -const ( - POLLERR = C.POLLERR - POLLHUP = C.POLLHUP - POLLIN = C.POLLIN - POLLINIGNEOF = C.POLLINIGNEOF - POLLNVAL = C.POLLNVAL - POLLOUT = C.POLLOUT - POLLPRI = C.POLLPRI - POLLRDBAND = C.POLLRDBAND - POLLRDNORM = C.POLLRDNORM - POLLWRBAND = C.POLLWRBAND - POLLWRNORM = C.POLLWRNORM -) - -// Capabilities - -type CapRights C.struct_cap_rights - -// Uname - -type Utsname C.struct_utsname diff --git a/vendor/golang.org/x/sys/unix/types_netbsd.go b/vendor/golang.org/x/sys/unix/types_netbsd.go deleted file mode 100644 index 4a96d72c37..0000000000 --- a/vendor/golang.org/x/sys/unix/types_netbsd.go +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -/* -Input to cgo -godefs. 
See README.md -*/ - -// +godefs map struct_in_addr [4]byte /* in_addr */ -// +godefs map struct_in6_addr [16]byte /* in6_addr */ - -package unix - -/* -#define KERNEL -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -enum { - sizeofPtr = sizeof(void*), -}; - -union sockaddr_all { - struct sockaddr s1; // this one gets used for fields - struct sockaddr_in s2; // these pad it out - struct sockaddr_in6 s3; - struct sockaddr_un s4; - struct sockaddr_dl s5; -}; - -struct sockaddr_any { - struct sockaddr addr; - char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)]; -}; - -*/ -import "C" - -// Machine characteristics - -const ( - SizeofPtr = C.sizeofPtr - SizeofShort = C.sizeof_short - SizeofInt = C.sizeof_int - SizeofLong = C.sizeof_long - SizeofLongLong = C.sizeof_longlong -) - -// Basic types - -type ( - _C_short C.short - _C_int C.int - _C_long C.long - _C_long_long C.longlong -) - -// Time - -type Timespec C.struct_timespec - -type Timeval C.struct_timeval - -// Processes - -type Rusage C.struct_rusage - -type Rlimit C.struct_rlimit - -type _Gid_t C.gid_t - -// Files - -type Stat_t C.struct_stat - -type Statfs_t C.struct_statfs - -type Flock_t C.struct_flock - -type Dirent C.struct_dirent - -type Fsid C.fsid_t - -// File system limits - -const ( - PathMax = C.PATH_MAX -) - -// Advice to Fadvise - -const ( - FADV_NORMAL = C.POSIX_FADV_NORMAL - FADV_RANDOM = C.POSIX_FADV_RANDOM - FADV_SEQUENTIAL = C.POSIX_FADV_SEQUENTIAL - FADV_WILLNEED = C.POSIX_FADV_WILLNEED - FADV_DONTNEED = C.POSIX_FADV_DONTNEED - FADV_NOREUSE = C.POSIX_FADV_NOREUSE -) - -// Sockets - -type RawSockaddrInet4 C.struct_sockaddr_in - -type RawSockaddrInet6 C.struct_sockaddr_in6 - -type RawSockaddrUnix C.struct_sockaddr_un - -type RawSockaddrDatalink C.struct_sockaddr_dl - -type RawSockaddr C.struct_sockaddr - -type RawSockaddrAny C.struct_sockaddr_any - -type _Socklen C.socklen_t - -type Linger C.struct_linger - -type Iovec C.struct_iovec - -type IPMreq C.struct_ip_mreq - -type IPv6Mreq C.struct_ipv6_mreq - -type Msghdr C.struct_msghdr - -type Cmsghdr C.struct_cmsghdr - -type Inet6Pktinfo C.struct_in6_pktinfo - -type IPv6MTUInfo C.struct_ip6_mtuinfo - -type ICMPv6Filter C.struct_icmp6_filter - -const ( - SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in - SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6 - SizeofSockaddrAny = C.sizeof_struct_sockaddr_any - SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un - SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl - SizeofLinger = C.sizeof_struct_linger - SizeofIPMreq = C.sizeof_struct_ip_mreq - SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq - SizeofMsghdr = C.sizeof_struct_msghdr - SizeofCmsghdr = C.sizeof_struct_cmsghdr - SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo - SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo - SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter -) - -// Ptrace requests - -const ( - PTRACE_TRACEME = C.PT_TRACE_ME - PTRACE_CONT = C.PT_CONTINUE - PTRACE_KILL = C.PT_KILL -) - -// Events (kqueue, kevent) - -type Kevent_t C.struct_kevent - -// Select - -type FdSet C.fd_set - -// Routing and interface messages - -const ( - SizeofIfMsghdr = C.sizeof_struct_if_msghdr - SizeofIfData = C.sizeof_struct_if_data - SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr - SizeofIfAnnounceMsghdr = 
C.sizeof_struct_if_announcemsghdr - SizeofRtMsghdr = C.sizeof_struct_rt_msghdr - SizeofRtMetrics = C.sizeof_struct_rt_metrics -) - -type IfMsghdr C.struct_if_msghdr - -type IfData C.struct_if_data - -type IfaMsghdr C.struct_ifa_msghdr - -type IfAnnounceMsghdr C.struct_if_announcemsghdr - -type RtMsghdr C.struct_rt_msghdr - -type RtMetrics C.struct_rt_metrics - -type Mclpool C.struct_mclpool - -// Berkeley packet filter - -const ( - SizeofBpfVersion = C.sizeof_struct_bpf_version - SizeofBpfStat = C.sizeof_struct_bpf_stat - SizeofBpfProgram = C.sizeof_struct_bpf_program - SizeofBpfInsn = C.sizeof_struct_bpf_insn - SizeofBpfHdr = C.sizeof_struct_bpf_hdr -) - -type BpfVersion C.struct_bpf_version - -type BpfStat C.struct_bpf_stat - -type BpfProgram C.struct_bpf_program - -type BpfInsn C.struct_bpf_insn - -type BpfHdr C.struct_bpf_hdr - -type BpfTimeval C.struct_bpf_timeval - -// Terminal handling - -type Termios C.struct_termios - -type Winsize C.struct_winsize - -type Ptmget C.struct_ptmget - -// fchmodat-like syscalls. - -const ( - AT_FDCWD = C.AT_FDCWD - AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW - AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW -) - -// poll - -type PollFd C.struct_pollfd - -const ( - POLLERR = C.POLLERR - POLLHUP = C.POLLHUP - POLLIN = C.POLLIN - POLLNVAL = C.POLLNVAL - POLLOUT = C.POLLOUT - POLLPRI = C.POLLPRI - POLLRDBAND = C.POLLRDBAND - POLLRDNORM = C.POLLRDNORM - POLLWRBAND = C.POLLWRBAND - POLLWRNORM = C.POLLWRNORM -) - -// Sysctl - -type Sysctlnode C.struct_sysctlnode - -// Uname - -type Utsname C.struct_utsname - -// Clockinfo - -const SizeofClockinfo = C.sizeof_struct_clockinfo - -type Clockinfo C.struct_clockinfo diff --git a/vendor/golang.org/x/sys/unix/types_openbsd.go b/vendor/golang.org/x/sys/unix/types_openbsd.go deleted file mode 100644 index 775cb57dc8..0000000000 --- a/vendor/golang.org/x/sys/unix/types_openbsd.go +++ /dev/null @@ -1,283 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -/* -Input to cgo -godefs. 
See README.md -*/ - -// +godefs map struct_in_addr [4]byte /* in_addr */ -// +godefs map struct_in6_addr [16]byte /* in6_addr */ - -package unix - -/* -#define KERNEL -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -enum { - sizeofPtr = sizeof(void*), -}; - -union sockaddr_all { - struct sockaddr s1; // this one gets used for fields - struct sockaddr_in s2; // these pad it out - struct sockaddr_in6 s3; - struct sockaddr_un s4; - struct sockaddr_dl s5; -}; - -struct sockaddr_any { - struct sockaddr addr; - char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)]; -}; - -*/ -import "C" - -// Machine characteristics - -const ( - SizeofPtr = C.sizeofPtr - SizeofShort = C.sizeof_short - SizeofInt = C.sizeof_int - SizeofLong = C.sizeof_long - SizeofLongLong = C.sizeof_longlong -) - -// Basic types - -type ( - _C_short C.short - _C_int C.int - _C_long C.long - _C_long_long C.longlong -) - -// Time - -type Timespec C.struct_timespec - -type Timeval C.struct_timeval - -// Processes - -type Rusage C.struct_rusage - -type Rlimit C.struct_rlimit - -type _Gid_t C.gid_t - -// Files - -type Stat_t C.struct_stat - -type Statfs_t C.struct_statfs - -type Flock_t C.struct_flock - -type Dirent C.struct_dirent - -type Fsid C.fsid_t - -// File system limits - -const ( - PathMax = C.PATH_MAX -) - -// Sockets - -type RawSockaddrInet4 C.struct_sockaddr_in - -type RawSockaddrInet6 C.struct_sockaddr_in6 - -type RawSockaddrUnix C.struct_sockaddr_un - -type RawSockaddrDatalink C.struct_sockaddr_dl - -type RawSockaddr C.struct_sockaddr - -type RawSockaddrAny C.struct_sockaddr_any - -type _Socklen C.socklen_t - -type Linger C.struct_linger - -type Iovec C.struct_iovec - -type IPMreq C.struct_ip_mreq - -type IPv6Mreq C.struct_ipv6_mreq - -type Msghdr C.struct_msghdr - -type Cmsghdr C.struct_cmsghdr - -type Inet6Pktinfo C.struct_in6_pktinfo - -type IPv6MTUInfo C.struct_ip6_mtuinfo - -type ICMPv6Filter C.struct_icmp6_filter - -const ( - SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in - SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6 - SizeofSockaddrAny = C.sizeof_struct_sockaddr_any - SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un - SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl - SizeofLinger = C.sizeof_struct_linger - SizeofIPMreq = C.sizeof_struct_ip_mreq - SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq - SizeofMsghdr = C.sizeof_struct_msghdr - SizeofCmsghdr = C.sizeof_struct_cmsghdr - SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo - SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo - SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter -) - -// Ptrace requests - -const ( - PTRACE_TRACEME = C.PT_TRACE_ME - PTRACE_CONT = C.PT_CONTINUE - PTRACE_KILL = C.PT_KILL -) - -// Events (kqueue, kevent) - -type Kevent_t C.struct_kevent - -// Select - -type FdSet C.fd_set - -// Routing and interface messages - -const ( - SizeofIfMsghdr = C.sizeof_struct_if_msghdr - SizeofIfData = C.sizeof_struct_if_data - SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr - SizeofIfAnnounceMsghdr = C.sizeof_struct_if_announcemsghdr - SizeofRtMsghdr = C.sizeof_struct_rt_msghdr - SizeofRtMetrics = C.sizeof_struct_rt_metrics -) - -type IfMsghdr C.struct_if_msghdr - -type IfData C.struct_if_data - -type IfaMsghdr C.struct_ifa_msghdr - -type IfAnnounceMsghdr C.struct_if_announcemsghdr - 
-type RtMsghdr C.struct_rt_msghdr - -type RtMetrics C.struct_rt_metrics - -type Mclpool C.struct_mclpool - -// Berkeley packet filter - -const ( - SizeofBpfVersion = C.sizeof_struct_bpf_version - SizeofBpfStat = C.sizeof_struct_bpf_stat - SizeofBpfProgram = C.sizeof_struct_bpf_program - SizeofBpfInsn = C.sizeof_struct_bpf_insn - SizeofBpfHdr = C.sizeof_struct_bpf_hdr -) - -type BpfVersion C.struct_bpf_version - -type BpfStat C.struct_bpf_stat - -type BpfProgram C.struct_bpf_program - -type BpfInsn C.struct_bpf_insn - -type BpfHdr C.struct_bpf_hdr - -type BpfTimeval C.struct_bpf_timeval - -// Terminal handling - -type Termios C.struct_termios - -type Winsize C.struct_winsize - -// fchmodat-like syscalls. - -const ( - AT_FDCWD = C.AT_FDCWD - AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW - AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW -) - -// poll - -type PollFd C.struct_pollfd - -const ( - POLLERR = C.POLLERR - POLLHUP = C.POLLHUP - POLLIN = C.POLLIN - POLLNVAL = C.POLLNVAL - POLLOUT = C.POLLOUT - POLLPRI = C.POLLPRI - POLLRDBAND = C.POLLRDBAND - POLLRDNORM = C.POLLRDNORM - POLLWRBAND = C.POLLWRBAND - POLLWRNORM = C.POLLWRNORM -) - -// Signal Sets - -type Sigset_t C.sigset_t - -// Uname - -type Utsname C.struct_utsname - -// Uvmexp - -const SizeofUvmexp = C.sizeof_struct_uvmexp - -type Uvmexp C.struct_uvmexp - -// Clockinfo - -const SizeofClockinfo = C.sizeof_struct_clockinfo - -type Clockinfo C.struct_clockinfo diff --git a/vendor/golang.org/x/sys/unix/types_solaris.go b/vendor/golang.org/x/sys/unix/types_solaris.go deleted file mode 100644 index 2b716f9348..0000000000 --- a/vendor/golang.org/x/sys/unix/types_solaris.go +++ /dev/null @@ -1,266 +0,0 @@ -// Copyright 2009 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -/* -Input to cgo -godefs. See README.md -*/ - -// +godefs map struct_in_addr [4]byte /* in_addr */ -// +godefs map struct_in6_addr [16]byte /* in6_addr */ - -package unix - -/* -#define KERNEL -// These defines ensure that builds done on newer versions of Solaris are -// backwards-compatible with older versions of Solaris and -// OpenSolaris-based derivatives. 
-#define __USE_SUNOS_SOCKETS__ // msghdr -#define __USE_LEGACY_PROTOTYPES__ // iovec -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -enum { - sizeofPtr = sizeof(void*), -}; - -union sockaddr_all { - struct sockaddr s1; // this one gets used for fields - struct sockaddr_in s2; // these pad it out - struct sockaddr_in6 s3; - struct sockaddr_un s4; - struct sockaddr_dl s5; -}; - -struct sockaddr_any { - struct sockaddr addr; - char pad[sizeof(union sockaddr_all) - sizeof(struct sockaddr)]; -}; - -*/ -import "C" - -// Machine characteristics - -const ( - SizeofPtr = C.sizeofPtr - SizeofShort = C.sizeof_short - SizeofInt = C.sizeof_int - SizeofLong = C.sizeof_long - SizeofLongLong = C.sizeof_longlong - PathMax = C.PATH_MAX - MaxHostNameLen = C.MAXHOSTNAMELEN -) - -// Basic types - -type ( - _C_short C.short - _C_int C.int - _C_long C.long - _C_long_long C.longlong -) - -// Time - -type Timespec C.struct_timespec - -type Timeval C.struct_timeval - -type Timeval32 C.struct_timeval32 - -type Tms C.struct_tms - -type Utimbuf C.struct_utimbuf - -// Processes - -type Rusage C.struct_rusage - -type Rlimit C.struct_rlimit - -type _Gid_t C.gid_t - -// Files - -type Stat_t C.struct_stat - -type Flock_t C.struct_flock - -type Dirent C.struct_dirent - -// Filesystems - -type _Fsblkcnt_t C.fsblkcnt_t - -type Statvfs_t C.struct_statvfs - -// Sockets - -type RawSockaddrInet4 C.struct_sockaddr_in - -type RawSockaddrInet6 C.struct_sockaddr_in6 - -type RawSockaddrUnix C.struct_sockaddr_un - -type RawSockaddrDatalink C.struct_sockaddr_dl - -type RawSockaddr C.struct_sockaddr - -type RawSockaddrAny C.struct_sockaddr_any - -type _Socklen C.socklen_t - -type Linger C.struct_linger - -type Iovec C.struct_iovec - -type IPMreq C.struct_ip_mreq - -type IPv6Mreq C.struct_ipv6_mreq - -type Msghdr C.struct_msghdr - -type Cmsghdr C.struct_cmsghdr - -type Inet6Pktinfo C.struct_in6_pktinfo - -type IPv6MTUInfo C.struct_ip6_mtuinfo - -type ICMPv6Filter C.struct_icmp6_filter - -const ( - SizeofSockaddrInet4 = C.sizeof_struct_sockaddr_in - SizeofSockaddrInet6 = C.sizeof_struct_sockaddr_in6 - SizeofSockaddrAny = C.sizeof_struct_sockaddr_any - SizeofSockaddrUnix = C.sizeof_struct_sockaddr_un - SizeofSockaddrDatalink = C.sizeof_struct_sockaddr_dl - SizeofLinger = C.sizeof_struct_linger - SizeofIPMreq = C.sizeof_struct_ip_mreq - SizeofIPv6Mreq = C.sizeof_struct_ipv6_mreq - SizeofMsghdr = C.sizeof_struct_msghdr - SizeofCmsghdr = C.sizeof_struct_cmsghdr - SizeofInet6Pktinfo = C.sizeof_struct_in6_pktinfo - SizeofIPv6MTUInfo = C.sizeof_struct_ip6_mtuinfo - SizeofICMPv6Filter = C.sizeof_struct_icmp6_filter -) - -// Select - -type FdSet C.fd_set - -// Misc - -type Utsname C.struct_utsname - -type Ustat_t C.struct_ustat - -const ( - AT_FDCWD = C.AT_FDCWD - AT_SYMLINK_NOFOLLOW = C.AT_SYMLINK_NOFOLLOW - AT_SYMLINK_FOLLOW = C.AT_SYMLINK_FOLLOW - AT_REMOVEDIR = C.AT_REMOVEDIR - AT_EACCESS = C.AT_EACCESS -) - -// Routing and interface messages - -const ( - SizeofIfMsghdr = C.sizeof_struct_if_msghdr - SizeofIfData = C.sizeof_struct_if_data - SizeofIfaMsghdr = C.sizeof_struct_ifa_msghdr - SizeofRtMsghdr = C.sizeof_struct_rt_msghdr - SizeofRtMetrics = C.sizeof_struct_rt_metrics -) - -type IfMsghdr C.struct_if_msghdr - -type IfData C.struct_if_data - -type 
IfaMsghdr C.struct_ifa_msghdr - -type RtMsghdr C.struct_rt_msghdr - -type RtMetrics C.struct_rt_metrics - -// Berkeley packet filter - -const ( - SizeofBpfVersion = C.sizeof_struct_bpf_version - SizeofBpfStat = C.sizeof_struct_bpf_stat - SizeofBpfProgram = C.sizeof_struct_bpf_program - SizeofBpfInsn = C.sizeof_struct_bpf_insn - SizeofBpfHdr = C.sizeof_struct_bpf_hdr -) - -type BpfVersion C.struct_bpf_version - -type BpfStat C.struct_bpf_stat - -type BpfProgram C.struct_bpf_program - -type BpfInsn C.struct_bpf_insn - -type BpfTimeval C.struct_bpf_timeval - -type BpfHdr C.struct_bpf_hdr - -// Terminal handling - -type Termios C.struct_termios - -type Termio C.struct_termio - -type Winsize C.struct_winsize - -// poll - -type PollFd C.struct_pollfd - -const ( - POLLERR = C.POLLERR - POLLHUP = C.POLLHUP - POLLIN = C.POLLIN - POLLNVAL = C.POLLNVAL - POLLOUT = C.POLLOUT - POLLPRI = C.POLLPRI - POLLRDBAND = C.POLLRDBAND - POLLRDNORM = C.POLLRDNORM - POLLWRBAND = C.POLLWRBAND - POLLWRNORM = C.POLLWRNORM -) diff --git a/vendor/golang.org/x/text/unicode/bidi/gen.go b/vendor/golang.org/x/text/unicode/bidi/gen.go deleted file mode 100644 index 987fc169cc..0000000000 --- a/vendor/golang.org/x/text/unicode/bidi/gen.go +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -package main - -import ( - "flag" - "log" - - "golang.org/x/text/internal/gen" - "golang.org/x/text/internal/triegen" - "golang.org/x/text/internal/ucd" -) - -var outputFile = flag.String("out", "tables.go", "output file") - -func main() { - gen.Init() - gen.Repackage("gen_trieval.go", "trieval.go", "bidi") - gen.Repackage("gen_ranges.go", "ranges_test.go", "bidi") - - genTables() -} - -// bidiClass names and codes taken from class "bc" in -// https://www.unicode.org/Public/8.0.0/ucd/PropertyValueAliases.txt -var bidiClass = map[string]Class{ - "AL": AL, // ArabicLetter - "AN": AN, // ArabicNumber - "B": B, // ParagraphSeparator - "BN": BN, // BoundaryNeutral - "CS": CS, // CommonSeparator - "EN": EN, // EuropeanNumber - "ES": ES, // EuropeanSeparator - "ET": ET, // EuropeanTerminator - "L": L, // LeftToRight - "NSM": NSM, // NonspacingMark - "ON": ON, // OtherNeutral - "R": R, // RightToLeft - "S": S, // SegmentSeparator - "WS": WS, // WhiteSpace - - "FSI": Control, - "PDF": Control, - "PDI": Control, - "LRE": Control, - "LRI": Control, - "LRO": Control, - "RLE": Control, - "RLI": Control, - "RLO": Control, -} - -func genTables() { - if numClass > 0x0F { - log.Fatalf("Too many Class constants (%#x > 0x0F).", numClass) - } - w := gen.NewCodeWriter() - defer w.WriteVersionedGoFile(*outputFile, "bidi") - - gen.WriteUnicodeVersion(w) - - t := triegen.NewTrie("bidi") - - // Build data about bracket mapping. These bits need to be or-ed with - // any other bits. - orMask := map[rune]uint64{} - - xorMap := map[rune]int{} - xorMasks := []rune{0} // First value is no-op. 
- - ucd.Parse(gen.OpenUCDFile("BidiBrackets.txt"), func(p *ucd.Parser) { - r1 := p.Rune(0) - r2 := p.Rune(1) - xor := r1 ^ r2 - if _, ok := xorMap[xor]; !ok { - xorMap[xor] = len(xorMasks) - xorMasks = append(xorMasks, xor) - } - entry := uint64(xorMap[xor]) << xorMaskShift - switch p.String(2) { - case "o": - entry |= openMask - case "c", "n": - default: - log.Fatalf("Unknown bracket class %q.", p.String(2)) - } - orMask[r1] = entry - }) - - w.WriteComment(` - xorMasks contains masks to be xor-ed with brackets to get the reverse - version.`) - w.WriteVar("xorMasks", xorMasks) - - done := map[rune]bool{} - - insert := func(r rune, c Class) { - if !done[r] { - t.Insert(r, orMask[r]|uint64(c)) - done[r] = true - } - } - - // Insert the derived BiDi properties. - ucd.Parse(gen.OpenUCDFile("extracted/DerivedBidiClass.txt"), func(p *ucd.Parser) { - r := p.Rune(0) - class, ok := bidiClass[p.String(1)] - if !ok { - log.Fatalf("%U: Unknown BiDi class %q", r, p.String(1)) - } - insert(r, class) - }) - visitDefaults(insert) - - // TODO: use sparse blocks. This would reduce table size considerably - // from the looks of it. - - sz, err := t.Gen(w) - if err != nil { - log.Fatal(err) - } - w.Size += sz -} - -// dummy values to make methods in gen_common compile. The real versions -// will be generated by this file to tables.go. -var ( - xorMasks []rune -) diff --git a/vendor/golang.org/x/text/unicode/bidi/gen_ranges.go b/vendor/golang.org/x/text/unicode/bidi/gen_ranges.go deleted file mode 100644 index 02c3b505d6..0000000000 --- a/vendor/golang.org/x/text/unicode/bidi/gen_ranges.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -package main - -import ( - "unicode" - - "golang.org/x/text/internal/gen" - "golang.org/x/text/internal/ucd" - "golang.org/x/text/unicode/rangetable" -) - -// These tables are hand-extracted from: -// https://www.unicode.org/Public/8.0.0/ucd/extracted/DerivedBidiClass.txt -func visitDefaults(fn func(r rune, c Class)) { - // first write default values for ranges listed above. - visitRunes(fn, AL, []rune{ - 0x0600, 0x07BF, // Arabic - 0x08A0, 0x08FF, // Arabic Extended-A - 0xFB50, 0xFDCF, // Arabic Presentation Forms - 0xFDF0, 0xFDFF, - 0xFE70, 0xFEFF, - 0x0001EE00, 0x0001EEFF, // Arabic Mathematical Alpha Symbols - }) - visitRunes(fn, R, []rune{ - 0x0590, 0x05FF, // Hebrew - 0x07C0, 0x089F, // Nko et al. - 0xFB1D, 0xFB4F, - 0x00010800, 0x00010FFF, // Cypriot Syllabary et. al. - 0x0001E800, 0x0001EDFF, - 0x0001EF00, 0x0001EFFF, - }) - visitRunes(fn, ET, []rune{ // European Terminator - 0x20A0, 0x20Cf, // Currency symbols - }) - rangetable.Visit(unicode.Noncharacter_Code_Point, func(r rune) { - fn(r, BN) // Boundary Neutral - }) - ucd.Parse(gen.OpenUCDFile("DerivedCoreProperties.txt"), func(p *ucd.Parser) { - if p.String(1) == "Default_Ignorable_Code_Point" { - fn(p.Rune(0), BN) // Boundary Neutral - } - }) -} - -func visitRunes(fn func(r rune, c Class), c Class, runes []rune) { - for i := 0; i < len(runes); i += 2 { - lo, hi := runes[i], runes[i+1] - for j := lo; j <= hi; j++ { - fn(j, c) - } - } -} diff --git a/vendor/golang.org/x/text/unicode/bidi/gen_trieval.go b/vendor/golang.org/x/text/unicode/bidi/gen_trieval.go deleted file mode 100644 index 9cb9942894..0000000000 --- a/vendor/golang.org/x/text/unicode/bidi/gen_trieval.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -package main - -// Class is the Unicode BiDi class. Each rune has a single class. -type Class uint - -const ( - L Class = iota // LeftToRight - R // RightToLeft - EN // EuropeanNumber - ES // EuropeanSeparator - ET // EuropeanTerminator - AN // ArabicNumber - CS // CommonSeparator - B // ParagraphSeparator - S // SegmentSeparator - WS // WhiteSpace - ON // OtherNeutral - BN // BoundaryNeutral - NSM // NonspacingMark - AL // ArabicLetter - Control // Control LRO - PDI - - numClass - - LRO // LeftToRightOverride - RLO // RightToLeftOverride - LRE // LeftToRightEmbedding - RLE // RightToLeftEmbedding - PDF // PopDirectionalFormat - LRI // LeftToRightIsolate - RLI // RightToLeftIsolate - FSI // FirstStrongIsolate - PDI // PopDirectionalIsolate - - unknownClass = ^Class(0) -) - -var controlToClass = map[rune]Class{ - 0x202D: LRO, // LeftToRightOverride, - 0x202E: RLO, // RightToLeftOverride, - 0x202A: LRE, // LeftToRightEmbedding, - 0x202B: RLE, // RightToLeftEmbedding, - 0x202C: PDF, // PopDirectionalFormat, - 0x2066: LRI, // LeftToRightIsolate, - 0x2067: RLI, // RightToLeftIsolate, - 0x2068: FSI, // FirstStrongIsolate, - 0x2069: PDI, // PopDirectionalIsolate, -} - -// A trie entry has the following bits: -// 7..5 XOR mask for brackets -// 4 1: Bracket open, 0: Bracket close -// 3..0 Class type - -const ( - openMask = 0x10 - xorMaskShift = 5 -) diff --git a/vendor/golang.org/x/text/unicode/norm/maketables.go b/vendor/golang.org/x/text/unicode/norm/maketables.go deleted file mode 100644 index 30a3aa9334..0000000000 --- a/vendor/golang.org/x/text/unicode/norm/maketables.go +++ /dev/null @@ -1,986 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -// Normalization table generator. -// Data read from the web. -// See forminfo.go for a description of the trie values associated with each rune. - -package main - -import ( - "bytes" - "encoding/binary" - "flag" - "fmt" - "io" - "log" - "sort" - "strconv" - "strings" - - "golang.org/x/text/internal/gen" - "golang.org/x/text/internal/triegen" - "golang.org/x/text/internal/ucd" -) - -func main() { - gen.Init() - loadUnicodeData() - compactCCC() - loadCompositionExclusions() - completeCharFields(FCanonical) - completeCharFields(FCompatibility) - computeNonStarterCounts() - verifyComputed() - printChars() - testDerived() - printTestdata() - makeTables() -} - -var ( - tablelist = flag.String("tables", - "all", - "comma-separated list of which tables to generate; "+ - "can be 'decomp', 'recomp', 'info' and 'all'") - test = flag.Bool("test", - false, - "test existing tables against DerivedNormalizationProps and generate test data for regression testing") - verbose = flag.Bool("verbose", - false, - "write data to stdout as it is parsed") -) - -const MaxChar = 0x10FFFF // anything above this shouldn't exist - -// Quick Check properties of runes allow us to quickly -// determine whether a rune may occur in a normal form. -// For a given normal form, a rune may be guaranteed to occur -// verbatim (QC=Yes), may or may not combine with another -// rune (QC=Maybe), or may not occur (QC=No). 
-type QCResult int - -const ( - QCUnknown QCResult = iota - QCYes - QCNo - QCMaybe -) - -func (r QCResult) String() string { - switch r { - case QCYes: - return "Yes" - case QCNo: - return "No" - case QCMaybe: - return "Maybe" - } - return "***UNKNOWN***" -} - -const ( - FCanonical = iota // NFC or NFD - FCompatibility // NFKC or NFKD - FNumberOfFormTypes -) - -const ( - MComposed = iota // NFC or NFKC - MDecomposed // NFD or NFKD - MNumberOfModes -) - -// This contains only the properties we're interested in. -type Char struct { - name string - codePoint rune // if zero, this index is not a valid code point. - ccc uint8 // canonical combining class - origCCC uint8 - excludeInComp bool // from CompositionExclusions.txt - compatDecomp bool // it has a compatibility expansion - - nTrailingNonStarters uint8 - nLeadingNonStarters uint8 // must be equal to trailing if non-zero - - forms [FNumberOfFormTypes]FormInfo // For FCanonical and FCompatibility - - state State -} - -var chars = make([]Char, MaxChar+1) -var cccMap = make(map[uint8]uint8) - -func (c Char) String() string { - buf := new(bytes.Buffer) - - fmt.Fprintf(buf, "%U [%s]:\n", c.codePoint, c.name) - fmt.Fprintf(buf, " ccc: %v\n", c.ccc) - fmt.Fprintf(buf, " excludeInComp: %v\n", c.excludeInComp) - fmt.Fprintf(buf, " compatDecomp: %v\n", c.compatDecomp) - fmt.Fprintf(buf, " state: %v\n", c.state) - fmt.Fprintf(buf, " NFC:\n") - fmt.Fprint(buf, c.forms[FCanonical]) - fmt.Fprintf(buf, " NFKC:\n") - fmt.Fprint(buf, c.forms[FCompatibility]) - - return buf.String() -} - -// In UnicodeData.txt, some ranges are marked like this: -// 3400;;Lo;0;L;;;;;N;;;;; -// 4DB5;;Lo;0;L;;;;;N;;;;; -// parseCharacter keeps a state variable indicating the weirdness. -type State int - -const ( - SNormal State = iota // known to be zero for the type - SFirst - SLast - SMissing -) - -var lastChar = rune('\u0000') - -func (c Char) isValid() bool { - return c.codePoint != 0 && c.state != SMissing -} - -type FormInfo struct { - quickCheck [MNumberOfModes]QCResult // index: MComposed or MDecomposed - verified [MNumberOfModes]bool // index: MComposed or MDecomposed - - combinesForward bool // May combine with rune on the right - combinesBackward bool // May combine with rune on the left - isOneWay bool // Never appears in result - inDecomp bool // Some decompositions result in this char. 
- decomp Decomposition - expandedDecomp Decomposition -} - -func (f FormInfo) String() string { - buf := bytes.NewBuffer(make([]byte, 0)) - - fmt.Fprintf(buf, " quickCheck[C]: %v\n", f.quickCheck[MComposed]) - fmt.Fprintf(buf, " quickCheck[D]: %v\n", f.quickCheck[MDecomposed]) - fmt.Fprintf(buf, " cmbForward: %v\n", f.combinesForward) - fmt.Fprintf(buf, " cmbBackward: %v\n", f.combinesBackward) - fmt.Fprintf(buf, " isOneWay: %v\n", f.isOneWay) - fmt.Fprintf(buf, " inDecomp: %v\n", f.inDecomp) - fmt.Fprintf(buf, " decomposition: %X\n", f.decomp) - fmt.Fprintf(buf, " expandedDecomp: %X\n", f.expandedDecomp) - - return buf.String() -} - -type Decomposition []rune - -func parseDecomposition(s string, skipfirst bool) (a []rune, err error) { - decomp := strings.Split(s, " ") - if len(decomp) > 0 && skipfirst { - decomp = decomp[1:] - } - for _, d := range decomp { - point, err := strconv.ParseUint(d, 16, 64) - if err != nil { - return a, err - } - a = append(a, rune(point)) - } - return a, nil -} - -func loadUnicodeData() { - f := gen.OpenUCDFile("UnicodeData.txt") - defer f.Close() - p := ucd.New(f) - for p.Next() { - r := p.Rune(ucd.CodePoint) - char := &chars[r] - - char.ccc = uint8(p.Uint(ucd.CanonicalCombiningClass)) - decmap := p.String(ucd.DecompMapping) - - exp, err := parseDecomposition(decmap, false) - isCompat := false - if err != nil { - if len(decmap) > 0 { - exp, err = parseDecomposition(decmap, true) - if err != nil { - log.Fatalf(`%U: bad decomp |%v|: "%s"`, r, decmap, err) - } - isCompat = true - } - } - - char.name = p.String(ucd.Name) - char.codePoint = r - char.forms[FCompatibility].decomp = exp - if !isCompat { - char.forms[FCanonical].decomp = exp - } else { - char.compatDecomp = true - } - if len(decmap) > 0 { - char.forms[FCompatibility].decomp = exp - } - } - if err := p.Err(); err != nil { - log.Fatal(err) - } -} - -// compactCCC converts the sparse set of CCC values to a continguous one, -// reducing the number of bits needed from 8 to 6. -func compactCCC() { - m := make(map[uint8]uint8) - for i := range chars { - c := &chars[i] - m[c.ccc] = 0 - } - cccs := []int{} - for v, _ := range m { - cccs = append(cccs, int(v)) - } - sort.Ints(cccs) - for i, c := range cccs { - cccMap[uint8(i)] = uint8(c) - m[uint8(c)] = uint8(i) - } - for i := range chars { - c := &chars[i] - c.origCCC = c.ccc - c.ccc = m[c.ccc] - } - if len(m) >= 1<<6 { - log.Fatalf("too many difference CCC values: %d >= 64", len(m)) - } -} - -// CompositionExclusions.txt has form: -// 0958 # ... -// See https://unicode.org/reports/tr44/ for full explanation -func loadCompositionExclusions() { - f := gen.OpenUCDFile("CompositionExclusions.txt") - defer f.Close() - p := ucd.New(f) - for p.Next() { - c := &chars[p.Rune(0)] - if c.excludeInComp { - log.Fatalf("%U: Duplicate entry in exclusions.", c.codePoint) - } - c.excludeInComp = true - } - if e := p.Err(); e != nil { - log.Fatal(e) - } -} - -// hasCompatDecomp returns true if any of the recursive -// decompositions contains a compatibility expansion. -// In this case, the character may not occur in NFK*. -func hasCompatDecomp(r rune) bool { - c := &chars[r] - if c.compatDecomp { - return true - } - for _, d := range c.forms[FCompatibility].decomp { - if hasCompatDecomp(d) { - return true - } - } - return false -} - -// Hangul related constants. 
-const ( - HangulBase = 0xAC00 - HangulEnd = 0xD7A4 // hangulBase + Jamo combinations (19 * 21 * 28) - - JamoLBase = 0x1100 - JamoLEnd = 0x1113 - JamoVBase = 0x1161 - JamoVEnd = 0x1176 - JamoTBase = 0x11A8 - JamoTEnd = 0x11C3 - - JamoLVTCount = 19 * 21 * 28 - JamoTCount = 28 -) - -func isHangul(r rune) bool { - return HangulBase <= r && r < HangulEnd -} - -func isHangulWithoutJamoT(r rune) bool { - if !isHangul(r) { - return false - } - r -= HangulBase - return r < JamoLVTCount && r%JamoTCount == 0 -} - -func ccc(r rune) uint8 { - return chars[r].ccc -} - -// Insert a rune in a buffer, ordered by Canonical Combining Class. -func insertOrdered(b Decomposition, r rune) Decomposition { - n := len(b) - b = append(b, 0) - cc := ccc(r) - if cc > 0 { - // Use bubble sort. - for ; n > 0; n-- { - if ccc(b[n-1]) <= cc { - break - } - b[n] = b[n-1] - } - } - b[n] = r - return b -} - -// Recursively decompose. -func decomposeRecursive(form int, r rune, d Decomposition) Decomposition { - dcomp := chars[r].forms[form].decomp - if len(dcomp) == 0 { - return insertOrdered(d, r) - } - for _, c := range dcomp { - d = decomposeRecursive(form, c, d) - } - return d -} - -func completeCharFields(form int) { - // Phase 0: pre-expand decomposition. - for i := range chars { - f := &chars[i].forms[form] - if len(f.decomp) == 0 { - continue - } - exp := make(Decomposition, 0) - for _, c := range f.decomp { - exp = decomposeRecursive(form, c, exp) - } - f.expandedDecomp = exp - } - - // Phase 1: composition exclusion, mark decomposition. - for i := range chars { - c := &chars[i] - f := &c.forms[form] - - // Marks script-specific exclusions and version restricted. - f.isOneWay = c.excludeInComp - - // Singletons - f.isOneWay = f.isOneWay || len(f.decomp) == 1 - - // Non-starter decompositions - if len(f.decomp) > 1 { - chk := c.ccc != 0 || chars[f.decomp[0]].ccc != 0 - f.isOneWay = f.isOneWay || chk - } - - // Runes that decompose into more than two runes. - f.isOneWay = f.isOneWay || len(f.decomp) > 2 - - if form == FCompatibility { - f.isOneWay = f.isOneWay || hasCompatDecomp(c.codePoint) - } - - for _, r := range f.decomp { - chars[r].forms[form].inDecomp = true - } - } - - // Phase 2: forward and backward combining. - for i := range chars { - c := &chars[i] - f := &c.forms[form] - - if !f.isOneWay && len(f.decomp) == 2 { - f0 := &chars[f.decomp[0]].forms[form] - f1 := &chars[f.decomp[1]].forms[form] - if !f0.isOneWay { - f0.combinesForward = true - } - if !f1.isOneWay { - f1.combinesBackward = true - } - } - if isHangulWithoutJamoT(rune(i)) { - f.combinesForward = true - } - } - - // Phase 3: quick check values. 
- for i := range chars { - c := &chars[i] - f := &c.forms[form] - - switch { - case len(f.decomp) > 0: - f.quickCheck[MDecomposed] = QCNo - case isHangul(rune(i)): - f.quickCheck[MDecomposed] = QCNo - default: - f.quickCheck[MDecomposed] = QCYes - } - switch { - case f.isOneWay: - f.quickCheck[MComposed] = QCNo - case (i & 0xffff00) == JamoLBase: - f.quickCheck[MComposed] = QCYes - if JamoLBase <= i && i < JamoLEnd { - f.combinesForward = true - } - if JamoVBase <= i && i < JamoVEnd { - f.quickCheck[MComposed] = QCMaybe - f.combinesBackward = true - f.combinesForward = true - } - if JamoTBase <= i && i < JamoTEnd { - f.quickCheck[MComposed] = QCMaybe - f.combinesBackward = true - } - case !f.combinesBackward: - f.quickCheck[MComposed] = QCYes - default: - f.quickCheck[MComposed] = QCMaybe - } - } -} - -func computeNonStarterCounts() { - // Phase 4: leading and trailing non-starter count - for i := range chars { - c := &chars[i] - - runes := []rune{rune(i)} - // We always use FCompatibility so that the CGJ insertion points do not - // change for repeated normalizations with different forms. - if exp := c.forms[FCompatibility].expandedDecomp; len(exp) > 0 { - runes = exp - } - // We consider runes that combine backwards to be non-starters for the - // purpose of Stream-Safe Text Processing. - for _, r := range runes { - if cr := &chars[r]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward { - break - } - c.nLeadingNonStarters++ - } - for i := len(runes) - 1; i >= 0; i-- { - if cr := &chars[runes[i]]; cr.ccc == 0 && !cr.forms[FCompatibility].combinesBackward { - break - } - c.nTrailingNonStarters++ - } - if c.nTrailingNonStarters > 3 { - log.Fatalf("%U: Decomposition with more than 3 (%d) trailing modifiers (%U)", i, c.nTrailingNonStarters, runes) - } - - if isHangul(rune(i)) { - c.nTrailingNonStarters = 2 - if isHangulWithoutJamoT(rune(i)) { - c.nTrailingNonStarters = 1 - } - } - - if l, t := c.nLeadingNonStarters, c.nTrailingNonStarters; l > 0 && l != t { - log.Fatalf("%U: number of leading and trailing non-starters should be equal (%d vs %d)", i, l, t) - } - if t := c.nTrailingNonStarters; t > 3 { - log.Fatalf("%U: number of trailing non-starters is %d > 3", t) - } - } -} - -func printBytes(w io.Writer, b []byte, name string) { - fmt.Fprintf(w, "// %s: %d bytes\n", name, len(b)) - fmt.Fprintf(w, "var %s = [...]byte {", name) - for i, c := range b { - switch { - case i%64 == 0: - fmt.Fprintf(w, "\n// Bytes %x - %x\n", i, i+63) - case i%8 == 0: - fmt.Fprintf(w, "\n") - } - fmt.Fprintf(w, "0x%.2X, ", c) - } - fmt.Fprint(w, "\n}\n\n") -} - -// See forminfo.go for format. -func makeEntry(f *FormInfo, c *Char) uint16 { - e := uint16(0) - if r := c.codePoint; HangulBase <= r && r < HangulEnd { - e |= 0x40 - } - if f.combinesForward { - e |= 0x20 - } - if f.quickCheck[MDecomposed] == QCNo { - e |= 0x4 - } - switch f.quickCheck[MComposed] { - case QCYes: - case QCNo: - e |= 0x10 - case QCMaybe: - e |= 0x18 - default: - log.Fatalf("Illegal quickcheck value %v.", f.quickCheck[MComposed]) - } - e |= uint16(c.nTrailingNonStarters) - return e -} - -// decompSet keeps track of unique decompositions, grouped by whether -// the decomposition is followed by a trailing and/or leading CCC. 
-type decompSet [7]map[string]bool - -const ( - normalDecomp = iota - firstMulti - firstCCC - endMulti - firstLeadingCCC - firstCCCZeroExcept - firstStarterWithNLead - lastDecomp -) - -var cname = []string{"firstMulti", "firstCCC", "endMulti", "firstLeadingCCC", "firstCCCZeroExcept", "firstStarterWithNLead", "lastDecomp"} - -func makeDecompSet() decompSet { - m := decompSet{} - for i := range m { - m[i] = make(map[string]bool) - } - return m -} -func (m *decompSet) insert(key int, s string) { - m[key][s] = true -} - -func printCharInfoTables(w io.Writer) int { - mkstr := func(r rune, f *FormInfo) (int, string) { - d := f.expandedDecomp - s := string([]rune(d)) - if max := 1 << 6; len(s) >= max { - const msg = "%U: too many bytes in decomposition: %d >= %d" - log.Fatalf(msg, r, len(s), max) - } - head := uint8(len(s)) - if f.quickCheck[MComposed] != QCYes { - head |= 0x40 - } - if f.combinesForward { - head |= 0x80 - } - s = string([]byte{head}) + s - - lccc := ccc(d[0]) - tccc := ccc(d[len(d)-1]) - cc := ccc(r) - if cc != 0 && lccc == 0 && tccc == 0 { - log.Fatalf("%U: trailing and leading ccc are 0 for non-zero ccc %d", r, cc) - } - if tccc < lccc && lccc != 0 { - const msg = "%U: lccc (%d) must be <= tcc (%d)" - log.Fatalf(msg, r, lccc, tccc) - } - index := normalDecomp - nTrail := chars[r].nTrailingNonStarters - nLead := chars[r].nLeadingNonStarters - if tccc > 0 || lccc > 0 || nTrail > 0 { - tccc <<= 2 - tccc |= nTrail - s += string([]byte{tccc}) - index = endMulti - for _, r := range d[1:] { - if ccc(r) == 0 { - index = firstCCC - } - } - if lccc > 0 || nLead > 0 { - s += string([]byte{lccc}) - if index == firstCCC { - log.Fatalf("%U: multi-segment decomposition not supported for decompositions with leading CCC != 0", r) - } - index = firstLeadingCCC - } - if cc != lccc { - if cc != 0 { - log.Fatalf("%U: for lccc != ccc, expected ccc to be 0; was %d", r, cc) - } - index = firstCCCZeroExcept - } - } else if len(d) > 1 { - index = firstMulti - } - return index, s - } - - decompSet := makeDecompSet() - const nLeadStr = "\x00\x01" // 0-byte length and tccc with nTrail. - decompSet.insert(firstStarterWithNLead, nLeadStr) - - // Store the uniqued decompositions in a byte buffer, - // preceded by their byte length. 
- for _, c := range chars { - for _, f := range c.forms { - if len(f.expandedDecomp) == 0 { - continue - } - if f.combinesBackward { - log.Fatalf("%U: combinesBackward and decompose", c.codePoint) - } - index, s := mkstr(c.codePoint, &f) - decompSet.insert(index, s) - } - } - - decompositions := bytes.NewBuffer(make([]byte, 0, 10000)) - size := 0 - positionMap := make(map[string]uint16) - decompositions.WriteString("\000") - fmt.Fprintln(w, "const (") - for i, m := range decompSet { - sa := []string{} - for s := range m { - sa = append(sa, s) - } - sort.Strings(sa) - for _, s := range sa { - p := decompositions.Len() - decompositions.WriteString(s) - positionMap[s] = uint16(p) - } - if cname[i] != "" { - fmt.Fprintf(w, "%s = 0x%X\n", cname[i], decompositions.Len()) - } - } - fmt.Fprintln(w, "maxDecomp = 0x8000") - fmt.Fprintln(w, ")") - b := decompositions.Bytes() - printBytes(w, b, "decomps") - size += len(b) - - varnames := []string{"nfc", "nfkc"} - for i := 0; i < FNumberOfFormTypes; i++ { - trie := triegen.NewTrie(varnames[i]) - - for r, c := range chars { - f := c.forms[i] - d := f.expandedDecomp - if len(d) != 0 { - _, key := mkstr(c.codePoint, &f) - trie.Insert(rune(r), uint64(positionMap[key])) - if c.ccc != ccc(d[0]) { - // We assume the lead ccc of a decomposition !=0 in this case. - if ccc(d[0]) == 0 { - log.Fatalf("Expected leading CCC to be non-zero; ccc is %d", c.ccc) - } - } - } else if c.nLeadingNonStarters > 0 && len(f.expandedDecomp) == 0 && c.ccc == 0 && !f.combinesBackward { - // Handle cases where it can't be detected that the nLead should be equal - // to nTrail. - trie.Insert(c.codePoint, uint64(positionMap[nLeadStr])) - } else if v := makeEntry(&f, &c)<<8 | uint16(c.ccc); v != 0 { - trie.Insert(c.codePoint, uint64(0x8000|v)) - } - } - sz, err := trie.Gen(w, triegen.Compact(&normCompacter{name: varnames[i]})) - if err != nil { - log.Fatal(err) - } - size += sz - } - return size -} - -func contains(sa []string, s string) bool { - for _, a := range sa { - if a == s { - return true - } - } - return false -} - -func makeTables() { - w := &bytes.Buffer{} - - size := 0 - if *tablelist == "" { - return - } - list := strings.Split(*tablelist, ",") - if *tablelist == "all" { - list = []string{"recomp", "info"} - } - - // Compute maximum decomposition size. - max := 0 - for _, c := range chars { - if n := len(string(c.forms[FCompatibility].expandedDecomp)); n > max { - max = n - } - } - fmt.Fprintln(w, `import "sync"`) - fmt.Fprintln(w) - - fmt.Fprintln(w, "const (") - fmt.Fprintln(w, "\t// Version is the Unicode edition from which the tables are derived.") - fmt.Fprintf(w, "\tVersion = %q\n", gen.UnicodeVersion()) - fmt.Fprintln(w) - fmt.Fprintln(w, "\t// MaxTransformChunkSize indicates the maximum number of bytes that Transform") - fmt.Fprintln(w, "\t// may need to write atomically for any Form. Making a destination buffer at") - fmt.Fprintln(w, "\t// least this size ensures that Transform can always make progress and that") - fmt.Fprintln(w, "\t// the user does not need to grow the buffer on an ErrShortDst.") - fmt.Fprintf(w, "\tMaxTransformChunkSize = %d+maxNonStarters*4\n", len(string(0x034F))+max) - fmt.Fprintln(w, ")\n") - - // Print the CCC remap table. 
- size += len(cccMap) - fmt.Fprintf(w, "var ccc = [%d]uint8{", len(cccMap)) - for i := 0; i < len(cccMap); i++ { - if i%8 == 0 { - fmt.Fprintln(w) - } - fmt.Fprintf(w, "%3d, ", cccMap[uint8(i)]) - } - fmt.Fprintln(w, "\n}\n") - - if contains(list, "info") { - size += printCharInfoTables(w) - } - - if contains(list, "recomp") { - // Note that we use 32 bit keys, instead of 64 bit. - // This clips the bits of three entries, but we know - // this won't cause a collision. The compiler will catch - // any changes made to UnicodeData.txt that introduces - // a collision. - // Note that the recomposition map for NFC and NFKC - // are identical. - - // Recomposition map - nrentries := 0 - for _, c := range chars { - f := c.forms[FCanonical] - if !f.isOneWay && len(f.decomp) > 0 { - nrentries++ - } - } - sz := nrentries * 8 - size += sz - fmt.Fprintf(w, "// recompMap: %d bytes (entries only)\n", sz) - fmt.Fprintln(w, "var recompMap map[uint32]rune") - fmt.Fprintln(w, "var recompMapOnce sync.Once\n") - fmt.Fprintln(w, `const recompMapPacked = "" +`) - var buf [8]byte - for i, c := range chars { - f := c.forms[FCanonical] - d := f.decomp - if !f.isOneWay && len(d) > 0 { - key := uint32(uint16(d[0]))<<16 + uint32(uint16(d[1])) - binary.BigEndian.PutUint32(buf[:4], key) - binary.BigEndian.PutUint32(buf[4:], uint32(i)) - fmt.Fprintf(w, "\t\t%q + // 0x%.8X: 0x%.8X\n", string(buf[:]), key, uint32(i)) - } - } - // hack so we don't have to special case the trailing plus sign - fmt.Fprintf(w, ` ""`) - fmt.Fprintln(w) - } - - fmt.Fprintf(w, "// Total size of tables: %dKB (%d bytes)\n", (size+512)/1024, size) - gen.WriteVersionedGoFile("tables.go", "norm", w.Bytes()) -} - -func printChars() { - if *verbose { - for _, c := range chars { - if !c.isValid() || c.state == SMissing { - continue - } - fmt.Println(c) - } - } -} - -// verifyComputed does various consistency tests. -func verifyComputed() { - for i, c := range chars { - for _, f := range c.forms { - isNo := (f.quickCheck[MDecomposed] == QCNo) - if (len(f.decomp) > 0) != isNo && !isHangul(rune(i)) { - log.Fatalf("%U: NF*D QC must be No if rune decomposes", i) - } - - isMaybe := f.quickCheck[MComposed] == QCMaybe - if f.combinesBackward != isMaybe { - log.Fatalf("%U: NF*C QC must be Maybe if combinesBackward", i) - } - if len(f.decomp) > 0 && f.combinesForward && isMaybe { - log.Fatalf("%U: NF*C QC must be Yes or No if combinesForward and decomposes", i) - } - - if len(f.expandedDecomp) != 0 { - continue - } - if a, b := c.nLeadingNonStarters > 0, (c.ccc > 0 || f.combinesBackward); a != b { - // We accept these runes to be treated differently (it only affects - // segment breaking in iteration, most likely on improper use), but - // reconsider if more characters are added. 
- // U+FF9E HALFWIDTH KATAKANA VOICED SOUND MARK;Lm;0;L; 3099;;;;N;;;;; - // U+FF9F HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK;Lm;0;L; 309A;;;;N;;;;; - // U+3133 HANGUL LETTER KIYEOK-SIOS;Lo;0;L; 11AA;;;;N;HANGUL LETTER GIYEOG SIOS;;;; - // U+318E HANGUL LETTER ARAEAE;Lo;0;L; 11A1;;;;N;HANGUL LETTER ALAE AE;;;; - // U+FFA3 HALFWIDTH HANGUL LETTER KIYEOK-SIOS;Lo;0;L; 3133;;;;N;HALFWIDTH HANGUL LETTER GIYEOG SIOS;;;; - // U+FFDC HALFWIDTH HANGUL LETTER I;Lo;0;L; 3163;;;;N;;;;; - if i != 0xFF9E && i != 0xFF9F && !(0x3133 <= i && i <= 0x318E) && !(0xFFA3 <= i && i <= 0xFFDC) { - log.Fatalf("%U: nLead was %v; want %v", i, a, b) - } - } - } - nfc := c.forms[FCanonical] - nfkc := c.forms[FCompatibility] - if nfc.combinesBackward != nfkc.combinesBackward { - log.Fatalf("%U: Cannot combine combinesBackward\n", c.codePoint) - } - } -} - -// Use values in DerivedNormalizationProps.txt to compare against the -// values we computed. -// DerivedNormalizationProps.txt has form: -// 00C0..00C5 ; NFD_QC; N # ... -// 0374 ; NFD_QC; N # ... -// See https://unicode.org/reports/tr44/ for full explanation -func testDerived() { - f := gen.OpenUCDFile("DerivedNormalizationProps.txt") - defer f.Close() - p := ucd.New(f) - for p.Next() { - r := p.Rune(0) - c := &chars[r] - - var ftype, mode int - qt := p.String(1) - switch qt { - case "NFC_QC": - ftype, mode = FCanonical, MComposed - case "NFD_QC": - ftype, mode = FCanonical, MDecomposed - case "NFKC_QC": - ftype, mode = FCompatibility, MComposed - case "NFKD_QC": - ftype, mode = FCompatibility, MDecomposed - default: - continue - } - var qr QCResult - switch p.String(2) { - case "Y": - qr = QCYes - case "N": - qr = QCNo - case "M": - qr = QCMaybe - default: - log.Fatalf(`Unexpected quick check value "%s"`, p.String(2)) - } - if got := c.forms[ftype].quickCheck[mode]; got != qr { - log.Printf("%U: FAILED %s (was %v need %v)\n", r, qt, got, qr) - } - c.forms[ftype].verified[mode] = true - } - if err := p.Err(); err != nil { - log.Fatal(err) - } - // Any unspecified value must be QCYes. Verify this. 
- for i, c := range chars { - for j, fd := range c.forms { - for k, qr := range fd.quickCheck { - if !fd.verified[k] && qr != QCYes { - m := "%U: FAIL F:%d M:%d (was %v need Yes) %s\n" - log.Printf(m, i, j, k, qr, c.name) - } - } - } - } -} - -var testHeader = `const ( - Yes = iota - No - Maybe -) - -type formData struct { - qc uint8 - combinesForward bool - decomposition string -} - -type runeData struct { - r rune - ccc uint8 - nLead uint8 - nTrail uint8 - f [2]formData // 0: canonical; 1: compatibility -} - -func f(qc uint8, cf bool, dec string) [2]formData { - return [2]formData{{qc, cf, dec}, {qc, cf, dec}} -} - -func g(qc, qck uint8, cf, cfk bool, d, dk string) [2]formData { - return [2]formData{{qc, cf, d}, {qck, cfk, dk}} -} - -var testData = []runeData{ -` - -func printTestdata() { - type lastInfo struct { - ccc uint8 - nLead uint8 - nTrail uint8 - f string - } - - last := lastInfo{} - w := &bytes.Buffer{} - fmt.Fprintf(w, testHeader) - for r, c := range chars { - f := c.forms[FCanonical] - qc, cf, d := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp) - f = c.forms[FCompatibility] - qck, cfk, dk := f.quickCheck[MComposed], f.combinesForward, string(f.expandedDecomp) - s := "" - if d == dk && qc == qck && cf == cfk { - s = fmt.Sprintf("f(%s, %v, %q)", qc, cf, d) - } else { - s = fmt.Sprintf("g(%s, %s, %v, %v, %q, %q)", qc, qck, cf, cfk, d, dk) - } - current := lastInfo{c.ccc, c.nLeadingNonStarters, c.nTrailingNonStarters, s} - if last != current { - fmt.Fprintf(w, "\t{0x%x, %d, %d, %d, %s},\n", r, c.origCCC, c.nLeadingNonStarters, c.nTrailingNonStarters, s) - last = current - } - } - fmt.Fprintln(w, "}") - gen.WriteVersionedGoFile("data_test.go", "norm", w.Bytes()) -} diff --git a/vendor/golang.org/x/text/unicode/norm/triegen.go b/vendor/golang.org/x/text/unicode/norm/triegen.go deleted file mode 100644 index 45d711900d..0000000000 --- a/vendor/golang.org/x/text/unicode/norm/triegen.go +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -// Trie table generator. -// Used by make*tables tools to generate a go file with trie data structures -// for mapping UTF-8 to a 16-bit value. All but the last byte in a UTF-8 byte -// sequence are used to lookup offsets in the index table to be used for the -// next byte. The last byte is used to index into a table with 16-bit values. 
- -package main - -import ( - "fmt" - "io" -) - -const maxSparseEntries = 16 - -type normCompacter struct { - sparseBlocks [][]uint64 - sparseOffset []uint16 - sparseCount int - name string -} - -func mostFrequentStride(a []uint64) int { - counts := make(map[int]int) - var v int - for _, x := range a { - if stride := int(x) - v; v != 0 && stride >= 0 { - counts[stride]++ - } - v = int(x) - } - var maxs, maxc int - for stride, cnt := range counts { - if cnt > maxc || (cnt == maxc && stride < maxs) { - maxs, maxc = stride, cnt - } - } - return maxs -} - -func countSparseEntries(a []uint64) int { - stride := mostFrequentStride(a) - var v, count int - for _, tv := range a { - if int(tv)-v != stride { - if tv != 0 { - count++ - } - } - v = int(tv) - } - return count -} - -func (c *normCompacter) Size(v []uint64) (sz int, ok bool) { - if n := countSparseEntries(v); n <= maxSparseEntries { - return (n+1)*4 + 2, true - } - return 0, false -} - -func (c *normCompacter) Store(v []uint64) uint32 { - h := uint32(len(c.sparseOffset)) - c.sparseBlocks = append(c.sparseBlocks, v) - c.sparseOffset = append(c.sparseOffset, uint16(c.sparseCount)) - c.sparseCount += countSparseEntries(v) + 1 - return h -} - -func (c *normCompacter) Handler() string { - return c.name + "Sparse.lookup" -} - -func (c *normCompacter) Print(w io.Writer) (retErr error) { - p := func(f string, x ...interface{}) { - if _, err := fmt.Fprintf(w, f, x...); retErr == nil && err != nil { - retErr = err - } - } - - ls := len(c.sparseBlocks) - p("// %sSparseOffset: %d entries, %d bytes\n", c.name, ls, ls*2) - p("var %sSparseOffset = %#v\n\n", c.name, c.sparseOffset) - - ns := c.sparseCount - p("// %sSparseValues: %d entries, %d bytes\n", c.name, ns, ns*4) - p("var %sSparseValues = [%d]valueRange {", c.name, ns) - for i, b := range c.sparseBlocks { - p("\n// Block %#x, offset %#x", i, c.sparseOffset[i]) - var v int - stride := mostFrequentStride(b) - n := countSparseEntries(b) - p("\n{value:%#04x,lo:%#02x},", stride, uint8(n)) - for i, nv := range b { - if int(nv)-v != stride { - if v != 0 { - p(",hi:%#02x},", 0x80+i-1) - } - if nv != 0 { - p("\n{value:%#04x,lo:%#02x", nv, 0x80+i) - } - } - v = int(nv) - } - if v != 0 { - p(",hi:%#02x},", 0x80+len(b)-1) - } - } - p("\n}\n\n") - return -} diff --git a/vendor/modules.txt b/vendor/modules.txt index 86fbe7469a..fbd085ef60 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1,11 +1,11 @@ # cloud.google.com/go v0.45.1 -cloud.google.com/go/storage +cloud.google.com/go/compute/metadata cloud.google.com/go/iam cloud.google.com/go/internal cloud.google.com/go/internal/optional cloud.google.com/go/internal/trace cloud.google.com/go/internal/version -cloud.google.com/go/compute/metadata +cloud.google.com/go/storage # github.com/agext/levenshtein v1.2.2 github.com/agext/levenshtein # github.com/apparentlymart/go-cidr v1.0.1 @@ -16,42 +16,42 @@ github.com/apparentlymart/go-textseg/textseg github.com/armon/go-radix # github.com/aws/aws-sdk-go v1.25.3 github.com/aws/aws-sdk-go/aws -github.com/aws/aws-sdk-go/aws/credentials -github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds -github.com/aws/aws-sdk-go/aws/ec2metadata -github.com/aws/aws-sdk-go/aws/session -github.com/aws/aws-sdk-go/service/s3 github.com/aws/aws-sdk-go/aws/awserr -github.com/aws/aws-sdk-go/aws/endpoints -github.com/aws/aws-sdk-go/internal/sdkio -github.com/aws/aws-sdk-go/internal/ini -github.com/aws/aws-sdk-go/internal/shareddefaults +github.com/aws/aws-sdk-go/aws/awsutil github.com/aws/aws-sdk-go/aws/client 
-github.com/aws/aws-sdk-go/aws/request -github.com/aws/aws-sdk-go/internal/sdkuri github.com/aws/aws-sdk-go/aws/client/metadata github.com/aws/aws-sdk-go/aws/corehandlers +github.com/aws/aws-sdk-go/aws/credentials +github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds +github.com/aws/aws-sdk-go/aws/credentials/endpointcreds github.com/aws/aws-sdk-go/aws/credentials/processcreds github.com/aws/aws-sdk-go/aws/credentials/stscreds github.com/aws/aws-sdk-go/aws/csm github.com/aws/aws-sdk-go/aws/defaults -github.com/aws/aws-sdk-go/aws/awsutil +github.com/aws/aws-sdk-go/aws/ec2metadata +github.com/aws/aws-sdk-go/aws/endpoints +github.com/aws/aws-sdk-go/aws/request +github.com/aws/aws-sdk-go/aws/session github.com/aws/aws-sdk-go/aws/signer/v4 +github.com/aws/aws-sdk-go/internal/ini github.com/aws/aws-sdk-go/internal/s3err +github.com/aws/aws-sdk-go/internal/sdkio +github.com/aws/aws-sdk-go/internal/sdkmath +github.com/aws/aws-sdk-go/internal/sdkrand +github.com/aws/aws-sdk-go/internal/sdkuri +github.com/aws/aws-sdk-go/internal/shareddefaults github.com/aws/aws-sdk-go/private/protocol github.com/aws/aws-sdk-go/private/protocol/eventstream github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi +github.com/aws/aws-sdk-go/private/protocol/json/jsonutil +github.com/aws/aws-sdk-go/private/protocol/query +github.com/aws/aws-sdk-go/private/protocol/query/queryutil github.com/aws/aws-sdk-go/private/protocol/rest github.com/aws/aws-sdk-go/private/protocol/restxml github.com/aws/aws-sdk-go/private/protocol/xml/xmlutil -github.com/aws/aws-sdk-go/internal/sdkrand +github.com/aws/aws-sdk-go/service/s3 github.com/aws/aws-sdk-go/service/sts github.com/aws/aws-sdk-go/service/sts/stsiface -github.com/aws/aws-sdk-go/aws/credentials/endpointcreds -github.com/aws/aws-sdk-go/internal/sdkmath -github.com/aws/aws-sdk-go/private/protocol/query -github.com/aws/aws-sdk-go/private/protocol/json/jsonutil -github.com/aws/aws-sdk-go/private/protocol/query/queryutil # github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d github.com/bgentry/go-netrc/netrc # github.com/bgentry/speakeasy v0.1.0 @@ -62,11 +62,11 @@ github.com/davecgh/go-spew/spew github.com/fatih/color # github.com/golang/protobuf v1.3.2 github.com/golang/protobuf/proto +github.com/golang/protobuf/protoc-gen-go/descriptor github.com/golang/protobuf/ptypes github.com/golang/protobuf/ptypes/any github.com/golang/protobuf/ptypes/duration github.com/golang/protobuf/ptypes/timestamp -github.com/golang/protobuf/protoc-gen-go/descriptor # github.com/google/go-cmp v0.3.1 github.com/google/go-cmp/cmp github.com/google/go-cmp/cmp/internal/diff @@ -85,7 +85,7 @@ github.com/googleapis/gax-go/v2 github.com/hashicorp/errwrap # github.com/hashicorp/go-cleanhttp v0.5.1 github.com/hashicorp/go-cleanhttp -# github.com/hashicorp/go-getter v1.4.0 +# github.com/hashicorp/go-getter v1.4.2-0.20200106182914-9813cbd4eb02 github.com/hashicorp/go-getter github.com/hashicorp/go-getter/helper/url # github.com/hashicorp/go-hclog v0.9.2 @@ -107,88 +107,91 @@ github.com/hashicorp/golang-lru/simplelru github.com/hashicorp/hcl github.com/hashicorp/hcl/hcl/ast github.com/hashicorp/hcl/hcl/parser -github.com/hashicorp/hcl/hcl/token -github.com/hashicorp/hcl/json/parser github.com/hashicorp/hcl/hcl/scanner github.com/hashicorp/hcl/hcl/strconv +github.com/hashicorp/hcl/hcl/token +github.com/hashicorp/hcl/json/parser github.com/hashicorp/hcl/json/scanner github.com/hashicorp/hcl/json/token -# github.com/hashicorp/hcl/v2 v2.0.0 +# github.com/hashicorp/hcl/v2 v2.3.0 
github.com/hashicorp/hcl/v2 -github.com/hashicorp/hcl/v2/hcldec -github.com/hashicorp/hcl/v2/hclsyntax +github.com/hashicorp/hcl/v2/ext/customdecode +github.com/hashicorp/hcl/v2/ext/dynblock github.com/hashicorp/hcl/v2/ext/typeexpr github.com/hashicorp/hcl/v2/gohcl +github.com/hashicorp/hcl/v2/hcldec +github.com/hashicorp/hcl/v2/hcled github.com/hashicorp/hcl/v2/hclparse -github.com/hashicorp/hcl/v2/ext/dynblock +github.com/hashicorp/hcl/v2/hclsyntax github.com/hashicorp/hcl/v2/hclwrite github.com/hashicorp/hcl/v2/json -github.com/hashicorp/hcl/v2/hcled # github.com/hashicorp/logutils v1.0.0 github.com/hashicorp/logutils -# github.com/hashicorp/terraform-config-inspect v0.0.0-20191115094559-17f92b0546e8 +# github.com/hashicorp/terraform v0.12.24 +github.com/hashicorp/terraform/helper/logging +# github.com/hashicorp/terraform-config-inspect v0.0.0-20191212124732-c6ae6269b9d7 github.com/hashicorp/terraform-config-inspect/tfconfig # github.com/hashicorp/terraform-json v0.4.0 github.com/hashicorp/terraform-json # github.com/hashicorp/terraform-plugin-sdk v1.7.0 -github.com/hashicorp/terraform-plugin-sdk/plugin +github.com/hashicorp/terraform-plugin-sdk/acctest +github.com/hashicorp/terraform-plugin-sdk/helper/acctest +github.com/hashicorp/terraform-plugin-sdk/helper/hashcode github.com/hashicorp/terraform-plugin-sdk/helper/logging +github.com/hashicorp/terraform-plugin-sdk/helper/resource github.com/hashicorp/terraform-plugin-sdk/helper/schema -github.com/hashicorp/terraform-plugin-sdk/helper/validation -github.com/hashicorp/terraform-plugin-sdk/terraform -github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema -github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin -github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert -github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery -github.com/hashicorp/terraform-plugin-sdk/internal/providers -github.com/hashicorp/terraform-plugin-sdk/internal/provisioners -github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5 -github.com/hashicorp/terraform-plugin-sdk/helper/hashcode -github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim -github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags github.com/hashicorp/terraform-plugin-sdk/helper/structure +github.com/hashicorp/terraform-plugin-sdk/helper/validation +github.com/hashicorp/terraform-plugin-sdk/httpclient github.com/hashicorp/terraform-plugin-sdk/internal/addrs +github.com/hashicorp/terraform-plugin-sdk/internal/command/format github.com/hashicorp/terraform-plugin-sdk/internal/configs +github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload +github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema +github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim github.com/hashicorp/terraform-plugin-sdk/internal/dag +github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig +github.com/hashicorp/terraform-plugin-sdk/internal/flatmap +github.com/hashicorp/terraform-plugin-sdk/internal/helper/config github.com/hashicorp/terraform-plugin-sdk/internal/helper/didyoumean +github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin +github.com/hashicorp/terraform-plugin-sdk/internal/httpclient +github.com/hashicorp/terraform-plugin-sdk/internal/initwd github.com/hashicorp/terraform-plugin-sdk/internal/lang +github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr +github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs +github.com/hashicorp/terraform-plugin-sdk/internal/modsdir 
github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps github.com/hashicorp/terraform-plugin-sdk/internal/plans github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange -github.com/hashicorp/terraform-plugin-sdk/internal/states -github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile -github.com/hashicorp/terraform-plugin-sdk/internal/version -github.com/hashicorp/terraform-plugin-sdk/helper/acctest -github.com/hashicorp/terraform-plugin-sdk/helper/resource -github.com/hashicorp/terraform-plugin-sdk/internal/httpclient +github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert +github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery +github.com/hashicorp/terraform-plugin-sdk/internal/providers +github.com/hashicorp/terraform-plugin-sdk/internal/provisioners github.com/hashicorp/terraform-plugin-sdk/internal/registry github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc github.com/hashicorp/terraform-plugin-sdk/internal/registry/response -github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr -github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs -github.com/hashicorp/terraform-plugin-sdk/acctest -github.com/hashicorp/terraform-plugin-sdk/internal/command/format -github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload -github.com/hashicorp/terraform-plugin-sdk/internal/helper/config -github.com/hashicorp/terraform-plugin-sdk/internal/initwd -github.com/hashicorp/terraform-plugin-sdk/httpclient -github.com/hashicorp/terraform-plugin-sdk/internal/modsdir -github.com/hashicorp/terraform-plugin-sdk/internal/flatmap -github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig +github.com/hashicorp/terraform-plugin-sdk/internal/states +github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile +github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags +github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5 +github.com/hashicorp/terraform-plugin-sdk/internal/version github.com/hashicorp/terraform-plugin-sdk/meta +github.com/hashicorp/terraform-plugin-sdk/plugin +github.com/hashicorp/terraform-plugin-sdk/terraform # github.com/hashicorp/terraform-plugin-test v1.2.0 github.com/hashicorp/terraform-plugin-test # github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596 -github.com/hashicorp/terraform-svchost/disco github.com/hashicorp/terraform-svchost github.com/hashicorp/terraform-svchost/auth +github.com/hashicorp/terraform-svchost/disco # github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d github.com/hashicorp/yamux # github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af github.com/jmespath/go-jmespath # github.com/kylelemons/godebug v1.1.0 -github.com/kylelemons/godebug/pretty github.com/kylelemons/godebug/diff +github.com/kylelemons/godebug/pretty # github.com/mattn/go-colorable v0.1.1 github.com/mattn/go-colorable # github.com/mattn/go-isatty v0.0.5 @@ -213,137 +216,145 @@ github.com/mitchellh/reflectwalk github.com/oklog/run # github.com/posener/complete v1.2.1 github.com/posener/complete -github.com/posener/complete/cmd/install github.com/posener/complete/cmd +github.com/posener/complete/cmd/install github.com/posener/complete/match +# github.com/shurcooL/githubv4 v0.0.0-20191127044304-8f68eb5628d0 +github.com/shurcooL/githubv4 +# github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f +github.com/shurcooL/graphql +github.com/shurcooL/graphql/ident +github.com/shurcooL/graphql/internal/jsonutil # github.com/spf13/afero v1.2.2 
github.com/spf13/afero github.com/spf13/afero/mem # github.com/ulikunitz/xz v0.5.5 github.com/ulikunitz/xz +github.com/ulikunitz/xz/internal/hash github.com/ulikunitz/xz/internal/xlog github.com/ulikunitz/xz/lzma -github.com/ulikunitz/xz/internal/hash # github.com/vmihailenco/msgpack v4.0.1+incompatible github.com/vmihailenco/msgpack github.com/vmihailenco/msgpack/codes # github.com/zclconf/go-cty v1.2.1 github.com/zclconf/go-cty/cty -github.com/zclconf/go-cty/cty/msgpack github.com/zclconf/go-cty/cty/convert -github.com/zclconf/go-cty/cty/json -github.com/zclconf/go-cty/cty/gocty -github.com/zclconf/go-cty/cty/set github.com/zclconf/go-cty/cty/function github.com/zclconf/go-cty/cty/function/stdlib +github.com/zclconf/go-cty/cty/gocty +github.com/zclconf/go-cty/cty/json +github.com/zclconf/go-cty/cty/msgpack +github.com/zclconf/go-cty/cty/set # github.com/zclconf/go-cty-yaml v1.0.1 github.com/zclconf/go-cty-yaml # go.opencensus.io v0.22.0 -go.opencensus.io/trace -go.opencensus.io/plugin/ochttp +go.opencensus.io go.opencensus.io/internal -go.opencensus.io/trace/internal -go.opencensus.io/trace/tracestate +go.opencensus.io/internal/tagencoding +go.opencensus.io/metric/metricdata +go.opencensus.io/metric/metricproducer +go.opencensus.io/plugin/ochttp go.opencensus.io/plugin/ochttp/propagation/b3 +go.opencensus.io/resource go.opencensus.io/stats +go.opencensus.io/stats/internal go.opencensus.io/stats/view go.opencensus.io/tag +go.opencensus.io/trace +go.opencensus.io/trace/internal go.opencensus.io/trace/propagation -go.opencensus.io -go.opencensus.io/metric/metricdata -go.opencensus.io/stats/internal -go.opencensus.io/internal/tagencoding -go.opencensus.io/metric/metricproducer -go.opencensus.io/resource +go.opencensus.io/trace/tracestate # golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d -golang.org/x/crypto/nacl/box -golang.org/x/crypto/openpgp +golang.org/x/crypto/bcrypt golang.org/x/crypto/blake2b +golang.org/x/crypto/blowfish +golang.org/x/crypto/cast5 +golang.org/x/crypto/chacha20 golang.org/x/crypto/curve25519 +golang.org/x/crypto/ed25519 +golang.org/x/crypto/ed25519/internal/edwards25519 +golang.org/x/crypto/internal/subtle +golang.org/x/crypto/nacl/box golang.org/x/crypto/nacl/secretbox -golang.org/x/crypto/salsa20/salsa +golang.org/x/crypto/openpgp golang.org/x/crypto/openpgp/armor +golang.org/x/crypto/openpgp/elgamal golang.org/x/crypto/openpgp/errors golang.org/x/crypto/openpgp/packet golang.org/x/crypto/openpgp/s2k -golang.org/x/crypto/internal/subtle golang.org/x/crypto/poly1305 +golang.org/x/crypto/salsa20/salsa golang.org/x/crypto/ssh -golang.org/x/crypto/cast5 -golang.org/x/crypto/openpgp/elgamal -golang.org/x/crypto/bcrypt -golang.org/x/crypto/chacha20 -golang.org/x/crypto/ed25519 golang.org/x/crypto/ssh/internal/bcrypt_pbkdf -golang.org/x/crypto/blowfish -golang.org/x/crypto/ed25519/internal/edwards25519 # golang.org/x/net v0.0.0-20191009170851-d66e71096ffb golang.org/x/net/context -golang.org/x/net/trace golang.org/x/net/context/ctxhttp -golang.org/x/net/internal/timeseries +golang.org/x/net/http/httpguts golang.org/x/net/http2 golang.org/x/net/http2/hpack golang.org/x/net/idna -golang.org/x/net/http/httpguts +golang.org/x/net/internal/timeseries +golang.org/x/net/trace # golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d golang.org/x/oauth2 -golang.org/x/oauth2/internal golang.org/x/oauth2/google +golang.org/x/oauth2/internal golang.org/x/oauth2/jws golang.org/x/oauth2/jwt # golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa golang.org/x/sys/cpu 
golang.org/x/sys/unix # golang.org/x/text v0.3.2 -golang.org/x/text/unicode/norm -golang.org/x/text/transform golang.org/x/text/secure/bidirule +golang.org/x/text/transform golang.org/x/text/unicode/bidi +golang.org/x/text/unicode/norm # google.golang.org/api v0.9.0 -google.golang.org/api/iterator +google.golang.org/api/gensupport google.golang.org/api/googleapi +google.golang.org/api/googleapi/internal/uritemplates +google.golang.org/api/googleapi/transport +google.golang.org/api/internal +google.golang.org/api/iterator google.golang.org/api/option google.golang.org/api/storage/v1 google.golang.org/api/transport/http -google.golang.org/api/googleapi/internal/uritemplates -google.golang.org/api/internal -google.golang.org/api/gensupport -google.golang.org/api/googleapi/transport google.golang.org/api/transport/http/internal/propagation # google.golang.org/appengine v1.6.1 -google.golang.org/appengine/urlfetch -google.golang.org/appengine/datastore -google.golang.org/appengine/internal -google.golang.org/appengine/internal/urlfetch google.golang.org/appengine +google.golang.org/appengine/datastore google.golang.org/appengine/datastore/internal/cloudkey -google.golang.org/appengine/internal/datastore +google.golang.org/appengine/datastore/internal/cloudpb +google.golang.org/appengine/internal +google.golang.org/appengine/internal/app_identity google.golang.org/appengine/internal/base +google.golang.org/appengine/internal/datastore google.golang.org/appengine/internal/log -google.golang.org/appengine/internal/remote_api -google.golang.org/appengine/internal/app_identity google.golang.org/appengine/internal/modules -google.golang.org/appengine/datastore/internal/cloudpb +google.golang.org/appengine/internal/remote_api +google.golang.org/appengine/internal/urlfetch +google.golang.org/appengine/urlfetch # google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 -google.golang.org/genproto/googleapis/rpc/status +google.golang.org/genproto/googleapis/api/annotations google.golang.org/genproto/googleapis/iam/v1 google.golang.org/genproto/googleapis/rpc/code -google.golang.org/genproto/googleapis/api/annotations +google.golang.org/genproto/googleapis/rpc/status google.golang.org/genproto/googleapis/type/expr # google.golang.org/grpc v1.23.0 google.golang.org/grpc -google.golang.org/grpc/credentials -google.golang.org/grpc/health -google.golang.org/grpc/health/grpc_health_v1 -google.golang.org/grpc/codes -google.golang.org/grpc/status google.golang.org/grpc/balancer +google.golang.org/grpc/balancer/base google.golang.org/grpc/balancer/roundrobin +google.golang.org/grpc/binarylog/grpc_binarylog_v1 +google.golang.org/grpc/codes google.golang.org/grpc/connectivity +google.golang.org/grpc/credentials +google.golang.org/grpc/credentials/internal google.golang.org/grpc/encoding google.golang.org/grpc/encoding/proto google.golang.org/grpc/grpclog +google.golang.org/grpc/health +google.golang.org/grpc/health/grpc_health_v1 google.golang.org/grpc/internal google.golang.org/grpc/internal/backoff google.golang.org/grpc/internal/balancerload @@ -352,6 +363,7 @@ google.golang.org/grpc/internal/channelz google.golang.org/grpc/internal/envconfig google.golang.org/grpc/internal/grpcrand google.golang.org/grpc/internal/grpcsync +google.golang.org/grpc/internal/syscall google.golang.org/grpc/internal/transport google.golang.org/grpc/keepalive google.golang.org/grpc/metadata @@ -362,9 +374,6 @@ google.golang.org/grpc/resolver/dns google.golang.org/grpc/resolver/passthrough google.golang.org/grpc/serviceconfig 
 google.golang.org/grpc/stats
+google.golang.org/grpc/status
 google.golang.org/grpc/tap
 google.golang.org/grpc/test/bufconn
-google.golang.org/grpc/credentials/internal
-google.golang.org/grpc/balancer/base
-google.golang.org/grpc/binarylog/grpc_binarylog_v1
-google.golang.org/grpc/internal/syscall

From a73dbfa36cecf9f93b4aa155e8825d941a412578 Mon Sep 17 00:00:00 2001
From: Patrick Marabeas
Date: Sun, 12 Apr 2020 20:01:32 +1000
Subject: [PATCH 4/4] Update acceptance testing documentation with GITHUB_BASE_URL env var

Update the acceptance testing documentation to include exporting the
GITHUB_BASE_URL environment variable.
---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index fcd053deea..e9add575f2 100644
--- a/README.md
+++ b/README.md
@@ -69,6 +69,8 @@ Acceptance test prerequisites
 -----------------------------
 In order to successfully run the full suite of acceptance tests, you will need to have the following:

+Export `https://api.github.com/` as the `GITHUB_BASE_URL` environment variable.
+
 ### GitHub personal access token
 You will need to create a [personal access token](https://help.github.com/en/articles/creating-a-personal-access-token-for-the-command-line) for testing. It will need to have the following scopes selected:
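For readers preparing to run the acceptance tests, a minimal sketch of the environment setup described by the README change above might look like the following. Only `GITHUB_BASE_URL` comes from this patch; the token and organization variables are illustrative placeholders for the other prerequisites and are assumptions, not part of the change.

```sh
# Sketch of an acceptance-test environment.
# GITHUB_BASE_URL is the variable introduced by the README change above;
# the remaining exports are assumed placeholders for the other prerequisites.
export GITHUB_BASE_URL="https://api.github.com/"

export GITHUB_TOKEN="<personal access token with the required scopes>"
export GITHUB_ORGANIZATION="<organization used for testing>"
```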